diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4731397 --- /dev/null +++ b/.gitignore @@ -0,0 +1,210 @@ +# --- Ignore some folders ---- # +data/ +resnet_classification_pytorch_vision/calculator_output/ + +# --- Ignore some files ---- # +/res_calc-time.json +/res_meas-time.json +/TAPO-VAR.json +/UTIL-VAR.json +/logs_and_results/2023* +/logs_and_results/*/*_logs_and_results/*/*_model/* +/logs_and_results/*/*_logs_and_results/*/term_logs/* +/logs_and_results/2_exp_resnet/batch-size-32/*_logs_and_results/*/term_logs/* +/logs_and_results/2_exp_resnet/batch-size-32/*_logs_and_results/*/*_model/* +/logs_and_results/current_logs_and_results_folder.txt +/TAPO-credentials.json + + +# logs_and_results/2_exp_resnet/batch-size-32/20230526-122217_logs_and_results/1/image_net_model/ +# logs_and_results/2_exp_resnet/batch-size-32/20230526-122217_logs_and_results/1/output_eco2ai.csv +# logs_and_results/2_exp_resnet/batch-size-32/20230526-122217_logs_and_results/1/term_logs/ +# logs_and_results/2_exp_resnet/batch-size-32/20230526-122217_logs_and_results/2/image_net_model/ +# logs_and_results/2_exp_resnet/batch-size-32/20230526-122217_logs_and_results/2/term_logs/ + +# --- Ignore logs ---- # +# *.log + +# --- Ignore .DS_Store ---- # +.DS_Store +.pth + +# ------------------------------------------------ # + +# gitignore template for Jupyter Notebooks: +# website: http://jupyter.org/ + +.ipynb_checkpoints +*/.ipynb_checkpoints/* + +# IPython +profile_default/ +ipython_config.py + +# Remove previous ipynb_checkpoints +# git rm -r .ipynb_checkpoints/ + +# ------------------------------------------------ # + +# gitignore template for Python: + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +# *.log # commented this because we keep the log files of carbontracker +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
+# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ diff --git a/GA_request_data.json b/GA_request_data.json new file mode 100644 index 0000000..8fd1400 --- /dev/null +++ b/GA_request_data.json @@ -0,0 +1,2808 @@ +{ + "output": "aggregate_data.data", + "changedPropIds": [ + "runTime_hour_input.value" + ], + "inputs": [ + { + "id": "versioned_data", + "property": "data", + "value": { + "version": "v2.2", + "cores_dict": { + "CPU": { + "A8-7680": 11.3, + "A9-9425 SoC": 7.5, + "AMD 7552": 4.2, + "AMD EPYC 7251": 15, + "AMD EPYC 7343": 11.9, + "AMD EPYC 7513": 6.3, + "Any": 12, + "Athlon 3000G": 17.5, + "Core 2 Quad Q6600": 23.8, + "Core i3-10100": 16.3, + "Core i3-10300": 15.5, + "Core i3-10320": 22.8, + "Core i3-10350K": 22.8, + "Core i3-9100": 16.3, + "Core i3-9100F": 16.3, + "Core i5-10400": 10.8, + "Core i5-10400F": 10.8, + "Core i5-10500": 10.8, + "Core i5-10600": 10.8, + "Core i5-10600K": 15.8, + "Core i5-3570K": 19.3, + "Core i5-4460": 21, + "Core i5-9400": 10.8, + "Core i5-9400F": 10.8, + "Core i5-9600KF": 15.8, + "Core i7-10700": 8.1, + "Core i7-10700K": 15.6, + "Core i7-4930K": 21.7, + "Core i7-6700K": 23.8, + "Core i7-8700K": 15.8, + "Core i7-9700F": 8.1, + "Core i7-9700K": 11.9, + "Core i9-10900K": 12.5, + "Core i9-10900KF": 10.5, + "Core i9-10900XE": 16.5, + "Core i9-10920XE": 13.8, + "Core i9-12900K": 7.8, + "Core i9-9900K": 11.9, + "FX-6300": 15.8, + "FX-8350": 15.6, + "Ryzen 3 2200G": 16.3, + "Ryzen 3 3200G": 16.3, + "Ryzen 3 3200U": 7.5, + "Ryzen 5 1600": 10.8, + "Ryzen 5 2600": 10.8, + "Ryzen 5 3400G": 16.3, + "Ryzen 5 3500U": 3.8, + "Ryzen 5 3600": 10.8, + "Ryzen 5 3600X": 15.8, + "Ryzen 7 2700X": 13.1, + "Ryzen 7 3700X": 8.1, + "Ryzen 7 3800X": 13.1, + "Ryzen 9 3900X": 10.4, + "Ryzen 9 3950X": 6.6, + "Ryzen Threadripper 2990WX": 7.8, + "Ryzen Threadripper 3990X": 4.4, + "Xeon E5-2660 v3": 10.5, + "Xeon E5-2665": 14.4, + "Xeon E5-2670": 14.4, + "Xeon E5-2670 v2": 11.5, + "Xeon E5-2680 v3": 10, + "Xeon E5-2683 v4": 7.5, + "Xeon E5-2690 v2": 13, + "Xeon E5-2690 v3": 11.3, + "Xeon E5-2695 v4": 6.7, + "Xeon E5-2697 v4": 8.1, + "Xeon E5-2699 v3": 8.1, + "Xeon E5-2699 v4": 6.6, + "Xeon 
E5-4610 v4": 10.5, + "Xeon E5-4620": 11.9, + "Xeon E5-4650L": 14.4, + "Xeon E7-8867 v3": 10.3, + "Xeon E7-8880 v4": 6.8, + "Xeon Gold 6142": 9.4, + "Xeon Gold 6148": 7.5, + "Xeon Gold 6248": 7.5, + "Xeon Gold 6252": 6.3, + "Xeon L5640 ": 10, + "Xeon Phi 5110P": 3.8, + "Xeon Platinum 9282": 7.1, + "Xeon X3430": 23.8, + "Xeon X5660": 15.8 + }, + "GPU": { + "NVIDIA Jetson AGX Xavier": 30, + "NVIDIA Tesla T4": 70, + "AMD RX480": 150, + "NVIDIA GTX 1080": 180, + "TPU v3": 200, + "Any": 200, + "NVIDIA RTX 2080": 215, + "NVIDIA RTX 2080 Ti": 250, + "NVIDIA GTX 1080 Ti": 250, + "NVIDIA Titan V": 250, + "TPU v2": 250, + "NVIDIA GTX TITAN X": 250, + "NVIDIA TITAN X Pascal": 250, + "NVIDIA Tesla P100 PCIe": 250, + "NVIDIA Tesla V100": 300, + "TPU v3 pod": 288000, + "NVIDIA A100 PCIe": 250, + "NVIDIA Tesla P4": 75, + "NVIDIA Tesla K80": 300 + } + }, + "pueDefault_dict": { + "Unknown": 1.67, + "gcp": 1.11, + "aws": 1.2, + "azure": 1.125 + }, + "CI_dict_byLoc": { + "WORLD": { + "continentName": "World", + "countryName": "Any", + "regionName": "Any", + "carbonIntensity": 475 + }, + "TW": { + "continentName": "Asia", + "countryName": "China", + "regionName": "Taiwan", + "carbonIntensity": 509 + }, + "IL": { + "continentName": "Asia", + "countryName": "Israel", + "regionName": "Any", + "carbonIntensity": 558 + }, + "ZA": { + "continentName": "Africa", + "countryName": "South Africa", + "regionName": "Any", + "carbonIntensity": 900.6 + }, + "CN": { + "continentName": "Asia", + "countryName": "China", + "regionName": "Any", + "carbonIntensity": 537.4 + }, + "CN-HK": { + "continentName": "Asia", + "countryName": "China", + "regionName": "Hong Kong (HK Electricity Company)", + "carbonIntensity": 710 + }, + "CN-HK2": { + "continentName": "Asia", + "countryName": "China", + "regionName": "Hong Kong (CLP Group)", + "carbonIntensity": 650 + }, + "IN": { + "continentName": "Asia", + "countryName": "India", + "regionName": "Any", + "carbonIntensity": 708.2 + }, + "ID": { + "continentName": "Asia", + "countryName": "Indonesia", + "regionName": "Any", + "carbonIntensity": 717.7 + }, + "JP": { + "continentName": "Asia", + "countryName": "Japan", + "regionName": "Any", + "carbonIntensity": 465.8 + }, + "KR": { + "continentName": "Asia", + "countryName": "Korea", + "regionName": "Any", + "carbonIntensity": 415.6 + }, + "SA": { + "continentName": "Asia", + "countryName": "Saudi Arabia", + "regionName": "Any", + "carbonIntensity": 505.9 + }, + "SG": { + "continentName": "Asia", + "countryName": "Singapore", + "regionName": "Any", + "carbonIntensity": 408 + }, + "TH": { + "continentName": "Asia", + "countryName": "Thailand", + "regionName": "Any", + "carbonIntensity": 481 + }, + "TR": { + "continentName": "Asia", + "countryName": "Turkey", + "regionName": "Any", + "carbonIntensity": 375 + }, + "AE": { + "continentName": "Asia", + "countryName": "United Arab Emirates", + "regionName": "Any", + "carbonIntensity": 417.89 + }, + "AT": { + "continentName": "Europe", + "countryName": "Austria", + "regionName": "Any", + "carbonIntensity": 111.18 + }, + "BE": { + "continentName": "Europe", + "countryName": "Belgium", + "regionName": "Any", + "carbonIntensity": 161.89 + }, + "BG": { + "continentName": "Europe", + "countryName": "Bulgaria", + "regionName": "Any", + "carbonIntensity": 372.12 + }, + "HR": { + "continentName": "Europe", + "countryName": "Croatia", + "regionName": "Any", + "carbonIntensity": 226.96 + }, + "CY": { + "continentName": "Europe", + "countryName": "Cyprus", + "regionName": "Any", + "carbonIntensity": 642.9 + 
}, + "CZ": { + "continentName": "Europe", + "countryName": "Czech Republic", + "regionName": "Any", + "carbonIntensity": 495.49 + }, + "DK": { + "continentName": "Europe", + "countryName": "Denmark", + "regionName": "Any", + "carbonIntensity": 142.52 + }, + "EE": { + "continentName": "Europe", + "countryName": "Estonia", + "regionName": "Any", + "carbonIntensity": 598.69 + }, + "FI": { + "continentName": "Europe", + "countryName": "Finland", + "regionName": "Any", + "carbonIntensity": 95.32 + }, + "FR": { + "continentName": "Europe", + "countryName": "France", + "regionName": "Any", + "carbonIntensity": 51.28 + }, + "DE": { + "continentName": "Europe", + "countryName": "Germany", + "regionName": "Any", + "carbonIntensity": 338.66 + }, + "GR": { + "continentName": "Europe", + "countryName": "Greece", + "regionName": "Any", + "carbonIntensity": 410.01 + }, + "HU": { + "continentName": "Europe", + "countryName": "Hungary", + "regionName": "Any", + "carbonIntensity": 243.75 + }, + "IS": { + "continentName": "Europe", + "countryName": "Iceland", + "regionName": "Any", + "carbonIntensity": 0.13 + }, + "IE": { + "continentName": "Europe", + "countryName": "Ireland", + "regionName": "Any", + "carbonIntensity": 335.99 + }, + "IT": { + "continentName": "Europe", + "countryName": "Italy", + "regionName": "Any", + "carbonIntensity": 323.84 + }, + "LV": { + "continentName": "Europe", + "countryName": "Latvia", + "regionName": "Any", + "carbonIntensity": 215.67 + }, + "LT": { + "continentName": "Europe", + "countryName": "Lithuania", + "regionName": "Any", + "carbonIntensity": 253.56 + }, + "LU": { + "continentName": "Europe", + "countryName": "Luxembourg", + "regionName": "Any", + "carbonIntensity": 101.36 + }, + "MT": { + "continentName": "Europe", + "countryName": "Malta", + "regionName": "Any", + "carbonIntensity": 390.62 + }, + "NL": { + "continentName": "Europe", + "countryName": "Netherlands", + "regionName": "Any", + "carbonIntensity": 374.34 + }, + "NO": { + "continentName": "Europe", + "countryName": "Norway", + "regionName": "Any", + "carbonIntensity": 7.62 + }, + "PL": { + "continentName": "Europe", + "countryName": "Poland", + "regionName": "Any", + "carbonIntensity": 759.62 + }, + "PT": { + "continentName": "Europe", + "countryName": "Portugal", + "regionName": "Any", + "carbonIntensity": 201.55 + }, + "RO": { + "continentName": "Europe", + "countryName": "Romania", + "regionName": "Any", + "carbonIntensity": 261.84 + }, + "RU": { + "continentName": "Europe", + "countryName": "Russian Federation", + "regionName": "Any", + "carbonIntensity": 310.2 + }, + "RS": { + "continentName": "Europe", + "countryName": "Serbia", + "regionName": "Any", + "carbonIntensity": 776.69 + }, + "SK": { + "continentName": "Europe", + "countryName": "Slovakia", + "regionName": "Any", + "carbonIntensity": 155.48 + }, + "SI": { + "continentName": "Europe", + "countryName": "Slovenia", + "regionName": "Any", + "carbonIntensity": 224.05 + }, + "ES": { + "continentName": "Europe", + "countryName": "Spain", + "regionName": "Any", + "carbonIntensity": 171.03 + }, + "SE": { + "continentName": "Europe", + "countryName": "Sweden", + "regionName": "Any", + "carbonIntensity": 5.67 + }, + "CH": { + "continentName": "Europe", + "countryName": "Switzerland", + "regionName": "Any", + "carbonIntensity": 11.52 + }, + "GB": { + "continentName": "Europe", + "countryName": "United Kingdom", + "regionName": "Any", + "carbonIntensity": 231.12 + }, + "CA": { + "continentName": "North America", + "countryName": "Canada", + "regionName": 
"Any", + "carbonIntensity": 120 + }, + "CA-AB": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Alberta", + "carbonIntensity": 670 + }, + "CA-BC": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "British Columbia", + "carbonIntensity": 19.7 + }, + "CA-MT": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Manitoba", + "carbonIntensity": 1.3 + }, + "CA-NB": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "New Brunswick", + "carbonIntensity": 270 + }, + "CA-NL": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Newfoundland and Labrador", + "carbonIntensity": 29 + }, + "CA-NS": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Nova Scotia", + "carbonIntensity": 810 + }, + "CA-NT": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Northwest Territories", + "carbonIntensity": 200 + }, + "CA-NU": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Nunavut", + "carbonIntensity": 900 + }, + "CA-ON": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Ontario", + "carbonIntensity": 30 + }, + "CA-PE": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Prince Edward Island", + "carbonIntensity": 2 + }, + "CA-QC": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Quebec", + "carbonIntensity": 1.5 + }, + "CA-SK": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Saskatchewan", + "carbonIntensity": 710 + }, + "CA-YT": { + "continentName": "North America", + "countryName": "Canada", + "regionName": "Yukon Territory", + "carbonIntensity": 111 + }, + "MX": { + "continentName": "North America", + "countryName": "Mexico", + "regionName": "Any", + "carbonIntensity": 431.4 + }, + "US": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Any", + "carbonIntensity": 423.94 + }, + "US-AK": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Alaska", + "carbonIntensity": 462.33 + }, + "US-AL": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Alabama", + "carbonIntensity": 344.37 + }, + "US-AR": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Arkansas", + "carbonIntensity": 454.4 + }, + "US-AZ": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Arizona", + "carbonIntensity": 351.99 + }, + "US-CA": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "California", + "carbonIntensity": 216.43 + }, + "US-CO": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Colorado", + "carbonIntensity": 582.34 + }, + "US-CT": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Connecticut", + "carbonIntensity": 253 + }, + "US-DC": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Washington DC", + "carbonIntensity": 382.68 + }, + "US-DE": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Delaware", + "carbonIntensity": 360.68 + }, + "US-FL": { + "continentName": "North 
America", + "countryName": "United States of America", + "regionName": "Florida", + "carbonIntensity": 402.2 + }, + "US-GA": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Georgia", + "carbonIntensity": 345.58 + }, + "US-HI": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Hawaii", + "carbonIntensity": 731.21 + }, + "US-IA": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Iowa", + "carbonIntensity": 293.85 + }, + "US-ID": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Idaho", + "carbonIntensity": 101.89 + }, + "US-IL": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Illinois", + "carbonIntensity": 265.8 + }, + "US-IN": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Indiana", + "carbonIntensity": 740.02 + }, + "US-KS": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Kansas", + "carbonIntensity": 384.08 + }, + "US-KY": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Kentucky", + "carbonIntensity": 804.75 + }, + "US-LA": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Louisiana", + "carbonIntensity": 363.82 + }, + "US-MA": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Massachusetts", + "carbonIntensity": 420.32 + }, + "US-MD": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Maryland", + "carbonIntensity": 308.21 + }, + "US-ME": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Maine", + "carbonIntensity": 109 + }, + "US-MI": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Michigan", + "carbonIntensity": 448.08 + }, + "US-MN": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Minnesota", + "carbonIntensity": 367.93 + }, + "US-MO": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Missouri", + "carbonIntensity": 773 + }, + "US-MS": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Mississip", + "carbonIntensity": 427.07 + }, + "US-MT": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Montana", + "carbonIntensity": 435.87 + }, + "US-NC": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "North Carolina", + "carbonIntensity": 309.73 + }, + "US-ND": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "North Dakota", + "carbonIntensity": 663.05 + }, + "US-NE": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Nebraska", + "carbonIntensity": 573.51 + }, + "US-NH": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "New Hampshire", + "carbonIntensity": 118.44 + }, + "US-NJ": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "New Jersey", + "carbonIntensity": 235.1 + }, + "US-NM": { + 
"continentName": "North America", + "countryName": "United States of America", + "regionName": "New Mexico", + "carbonIntensity": 601.77 + }, + "US-NV": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Nevada", + "carbonIntensity": 342.25 + }, + "US-NY": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "New York", + "carbonIntensity": 199.01 + }, + "US-OH": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Ohio", + "carbonIntensity": 598.58 + }, + "US-OK": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Oklahoma", + "carbonIntensity": 338.44 + }, + "US-OR": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Oregon", + "carbonIntensity": 163.15 + }, + "US-PA": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Pennsylvania", + "carbonIntensity": 333.19 + }, + "US-RI": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Rhode Island", + "carbonIntensity": 395.27 + }, + "US-SC": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "South Carolina", + "carbonIntensity": 245.48 + }, + "US-SD": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "South Dakota", + "carbonIntensity": 162.69 + }, + "US-TN": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Tennessee", + "carbonIntensity": 272.89 + }, + "US-TX": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Texas", + "carbonIntensity": 409.16 + }, + "US-UT": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Utah", + "carbonIntensity": 747.82 + }, + "US-VA": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Virginia", + "carbonIntensity": 308 + }, + "US-VT": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Vermont", + "carbonIntensity": 14.45 + }, + "US-WA": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Washington", + "carbonIntensity": 101.84 + }, + "US-WI": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Wisconsin", + "carbonIntensity": 569.39 + }, + "US-WV": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "West Virginia", + "carbonIntensity": 919.34 + }, + "US-WY": { + "continentName": "North America", + "countryName": "United States of America", + "regionName": "Wyoming", + "carbonIntensity": 950.5 + }, + "AU": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "Any", + "carbonIntensity": 840 + }, + "AU-ACT": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "Australian Capital Territory", + "carbonIntensity": 870 + }, + "AU-NSW": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "New South Wales", + "carbonIntensity": 870 + }, + "AU-NT": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "Northern Territory", + "carbonIntensity": 620 + }, + "AU-NT2": { + "continentName": "Oceania", + "countryName": 
"Australia", + "regionName": "Northern Territory (Darwin Katherine Interconnected System)", + "carbonIntensity": 540 + }, + "AU-QLD": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "Queensland", + "carbonIntensity": 920 + }, + "AU-SA": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "South Australia", + "carbonIntensity": 420 + }, + "AU-TAS": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "Tasmania", + "carbonIntensity": 180 + }, + "AU-VIC": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "Victoria", + "carbonIntensity": 1060 + }, + "AU-WA1": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "Western Australia (North Western Interconnected System)", + "carbonIntensity": 580 + }, + "AU-WA2": { + "continentName": "Oceania", + "countryName": "Australia", + "regionName": "Western Australia (South West Interconnected System)", + "carbonIntensity": 700 + }, + "NZ": { + "continentName": "Oceania", + "countryName": "New Zealand", + "regionName": "Any", + "carbonIntensity": 110.1 + }, + "AR": { + "continentName": "South America", + "countryName": "Argentina", + "regionName": "Any", + "carbonIntensity": 307 + }, + "BR": { + "continentName": "South America", + "countryName": "Brazil", + "regionName": "Any", + "carbonIntensity": 61.7 + }, + "UY": { + "continentName": "South America", + "countryName": "Uruguay", + "regionName": "Any", + "carbonIntensity": 129 + } + }, + "CI_dict_byName": { + "South America": { + "Argentina": { + "Any": { + "location": "AR", + "carbonIntensity": 307 + } + }, + "Brazil": { + "Any": { + "location": "BR", + "carbonIntensity": 61.7 + } + }, + "Uruguay": { + "Any": { + "location": "UY", + "carbonIntensity": 129 + } + } + }, + "Oceania": { + "Australia": { + "Northern Territory (Darwin Katherine Interconnected System)": { + "location": "AU-NT2", + "carbonIntensity": 540 + }, + "Western Australia (North Western Interconnected System)": { + "location": "AU-WA1", + "carbonIntensity": 580 + }, + "Tasmania": { + "location": "AU-TAS", + "carbonIntensity": 180 + }, + "South Australia": { + "location": "AU-SA", + "carbonIntensity": 420 + }, + "Queensland": { + "location": "AU-QLD", + "carbonIntensity": 920 + }, + "Western Australia (South West Interconnected System)": { + "location": "AU-WA2", + "carbonIntensity": 700 + }, + "Any": { + "location": "AU", + "carbonIntensity": 840 + }, + "Australian Capital Territory": { + "location": "AU-ACT", + "carbonIntensity": 870 + }, + "Northern Territory": { + "location": "AU-NT", + "carbonIntensity": 620 + }, + "Victoria": { + "location": "AU-VIC", + "carbonIntensity": 1060 + }, + "New South Wales": { + "location": "AU-NSW", + "carbonIntensity": 870 + } + }, + "New Zealand": { + "Any": { + "location": "NZ", + "carbonIntensity": 110.1 + } + } + }, + "Europe": { + "Greece": { + "Any": { + "location": "GR", + "carbonIntensity": 410.01 + } + }, + "Cyprus": { + "Any": { + "location": "CY", + "carbonIntensity": 642.9 + } + }, + "Romania": { + "Any": { + "location": "RO", + "carbonIntensity": 261.84 + } + }, + "France": { + "Any": { + "location": "FR", + "carbonIntensity": 51.28 + } + }, + "Luxembourg": { + "Any": { + "location": "LU", + "carbonIntensity": 101.36 + } + }, + "Bulgaria": { + "Any": { + "location": "BG", + "carbonIntensity": 372.12 + } + }, + "Slovenia": { + "Any": { + "location": "SI", + "carbonIntensity": 224.05 + } + }, + "Italy": { + "Any": { + "location": "IT", + "carbonIntensity": 
323.84 + } + }, + "Austria": { + "Any": { + "location": "AT", + "carbonIntensity": 111.18 + } + }, + "Portugal": { + "Any": { + "location": "PT", + "carbonIntensity": 201.55 + } + }, + "Sweden": { + "Any": { + "location": "SE", + "carbonIntensity": 5.67 + } + }, + "Czech Republic": { + "Any": { + "location": "CZ", + "carbonIntensity": 495.49 + } + }, + "Denmark": { + "Any": { + "location": "DK", + "carbonIntensity": 142.52 + } + }, + "Serbia": { + "Any": { + "location": "RS", + "carbonIntensity": 776.69 + } + }, + "Hungary": { + "Any": { + "location": "HU", + "carbonIntensity": 243.75 + } + }, + "Iceland": { + "Any": { + "location": "IS", + "carbonIntensity": 0.13 + } + }, + "Latvia": { + "Any": { + "location": "LV", + "carbonIntensity": 215.67 + } + }, + "Slovakia": { + "Any": { + "location": "SK", + "carbonIntensity": 155.48 + } + }, + "Russian Federation": { + "Any": { + "location": "RU", + "carbonIntensity": 310.2 + } + }, + "Switzerland": { + "Any": { + "location": "CH", + "carbonIntensity": 11.52 + } + }, + "Croatia": { + "Any": { + "location": "HR", + "carbonIntensity": 226.96 + } + }, + "United Kingdom": { + "Any": { + "location": "GB", + "carbonIntensity": 231.12 + } + }, + "Norway": { + "Any": { + "location": "NO", + "carbonIntensity": 7.62 + } + }, + "Lithuania": { + "Any": { + "location": "LT", + "carbonIntensity": 253.56 + } + }, + "Germany": { + "Any": { + "location": "DE", + "carbonIntensity": 338.66 + } + }, + "Spain": { + "Any": { + "location": "ES", + "carbonIntensity": 171.03 + } + }, + "Ireland": { + "Any": { + "location": "IE", + "carbonIntensity": 335.99 + } + }, + "Netherlands": { + "Any": { + "location": "NL", + "carbonIntensity": 374.34 + } + }, + "Estonia": { + "Any": { + "location": "EE", + "carbonIntensity": 598.69 + } + }, + "Finland": { + "Any": { + "location": "FI", + "carbonIntensity": 95.32 + } + }, + "Belgium": { + "Any": { + "location": "BE", + "carbonIntensity": 161.89 + } + }, + "Malta": { + "Any": { + "location": "MT", + "carbonIntensity": 390.62 + } + }, + "Poland": { + "Any": { + "location": "PL", + "carbonIntensity": 759.62 + } + } + }, + "North America": { + "United States of America": { + "Hawaii": { + "location": "US-HI", + "carbonIntensity": 731.21 + }, + "Maine": { + "location": "US-ME", + "carbonIntensity": 109 + }, + "Indiana": { + "location": "US-IN", + "carbonIntensity": 740.02 + }, + "Montana": { + "location": "US-MT", + "carbonIntensity": 435.87 + }, + "New Mexico": { + "location": "US-NM", + "carbonIntensity": 601.77 + }, + "Kansas": { + "location": "US-KS", + "carbonIntensity": 384.08 + }, + "Wisconsin": { + "location": "US-WI", + "carbonIntensity": 569.39 + }, + "Massachusetts": { + "location": "US-MA", + "carbonIntensity": 420.32 + }, + "Virginia": { + "location": "US-VA", + "carbonIntensity": 308 + }, + "Minnesota": { + "location": "US-MN", + "carbonIntensity": 367.93 + }, + "South Dakota": { + "location": "US-SD", + "carbonIntensity": 162.69 + }, + "Alabama": { + "location": "US-AL", + "carbonIntensity": 344.37 + }, + "Missouri": { + "location": "US-MO", + "carbonIntensity": 773 + }, + "Nebraska": { + "location": "US-NE", + "carbonIntensity": 573.51 + }, + "Texas": { + "location": "US-TX", + "carbonIntensity": 409.16 + }, + "Connecticut": { + "location": "US-CT", + "carbonIntensity": 253 + }, + "Florida": { + "location": "US-FL", + "carbonIntensity": 402.2 + }, + "Rhode Island": { + "location": "US-RI", + "carbonIntensity": 395.27 + }, + "California": { + "location": "US-CA", + "carbonIntensity": 216.43 + }, + "Delaware": { + 
"location": "US-DE", + "carbonIntensity": 360.68 + }, + "Idaho": { + "location": "US-ID", + "carbonIntensity": 101.89 + }, + "New York": { + "location": "US-NY", + "carbonIntensity": 199.01 + }, + "Utah": { + "location": "US-UT", + "carbonIntensity": 747.82 + }, + "New Jersey": { + "location": "US-NJ", + "carbonIntensity": 235.1 + }, + "Louisiana": { + "location": "US-LA", + "carbonIntensity": 363.82 + }, + "Mississip": { + "location": "US-MS", + "carbonIntensity": 427.07 + }, + "Michigan": { + "location": "US-MI", + "carbonIntensity": 448.08 + }, + "New Hampshire": { + "location": "US-NH", + "carbonIntensity": 118.44 + }, + "Ohio": { + "location": "US-OH", + "carbonIntensity": 598.58 + }, + "North Carolina": { + "location": "US-NC", + "carbonIntensity": 309.73 + }, + "Nevada": { + "location": "US-NV", + "carbonIntensity": 342.25 + }, + "North Dakota": { + "location": "US-ND", + "carbonIntensity": 663.05 + }, + "Colorado": { + "location": "US-CO", + "carbonIntensity": 582.34 + }, + "Vermont": { + "location": "US-VT", + "carbonIntensity": 14.45 + }, + "Oregon": { + "location": "US-OR", + "carbonIntensity": 163.15 + }, + "South Carolina": { + "location": "US-SC", + "carbonIntensity": 245.48 + }, + "West Virginia": { + "location": "US-WV", + "carbonIntensity": 919.34 + }, + "Iowa": { + "location": "US-IA", + "carbonIntensity": 293.85 + }, + "Kentucky": { + "location": "US-KY", + "carbonIntensity": 804.75 + }, + "Maryland": { + "location": "US-MD", + "carbonIntensity": 308.21 + }, + "Oklahoma": { + "location": "US-OK", + "carbonIntensity": 338.44 + }, + "Pennsylvania": { + "location": "US-PA", + "carbonIntensity": 333.19 + }, + "Wyoming": { + "location": "US-WY", + "carbonIntensity": 950.5 + }, + "Alaska": { + "location": "US-AK", + "carbonIntensity": 462.33 + }, + "Arkansas": { + "location": "US-AR", + "carbonIntensity": 454.4 + }, + "Illinois": { + "location": "US-IL", + "carbonIntensity": 265.8 + }, + "Any": { + "location": "US", + "carbonIntensity": 423.94 + }, + "Washington": { + "location": "US-WA", + "carbonIntensity": 101.84 + }, + "Washington DC": { + "location": "US-DC", + "carbonIntensity": 382.68 + }, + "Georgia": { + "location": "US-GA", + "carbonIntensity": 345.58 + }, + "Tennessee": { + "location": "US-TN", + "carbonIntensity": 272.89 + }, + "Arizona": { + "location": "US-AZ", + "carbonIntensity": 351.99 + } + }, + "Mexico": { + "Any": { + "location": "MX", + "carbonIntensity": 431.4 + } + }, + "Canada": { + "Yukon Territory": { + "location": "CA-YT", + "carbonIntensity": 111 + }, + "Alberta": { + "location": "CA-AB", + "carbonIntensity": 670 + }, + "British Columbia": { + "location": "CA-BC", + "carbonIntensity": 19.7 + }, + "Nova Scotia": { + "location": "CA-NS", + "carbonIntensity": 810 + }, + "Nunavut": { + "location": "CA-NU", + "carbonIntensity": 900 + }, + "New Brunswick": { + "location": "CA-NB", + "carbonIntensity": 270 + }, + "Any": { + "location": "CA", + "carbonIntensity": 120 + }, + "Manitoba": { + "location": "CA-MT", + "carbonIntensity": 1.3 + }, + "Northwest Territories": { + "location": "CA-NT", + "carbonIntensity": 200 + }, + "Prince Edward Island": { + "location": "CA-PE", + "carbonIntensity": 2 + }, + "Quebec": { + "location": "CA-QC", + "carbonIntensity": 1.5 + }, + "Saskatchewan": { + "location": "CA-SK", + "carbonIntensity": 710 + }, + "Newfoundland and Labrador": { + "location": "CA-NL", + "carbonIntensity": 29 + }, + "Ontario": { + "location": "CA-ON", + "carbonIntensity": 30 + } + } + }, + "Asia": { + "India": { + "Any": { + "location": "IN", + 
"carbonIntensity": 708.2 + } + }, + "Singapore": { + "Any": { + "location": "SG", + "carbonIntensity": 408 + } + }, + "China": { + "Hong Kong (CLP Group)": { + "location": "CN-HK2", + "carbonIntensity": 650 + }, + "Any": { + "location": "CN", + "carbonIntensity": 537.4 + }, + "Taiwan": { + "location": "TW", + "carbonIntensity": 509 + }, + "Hong Kong (HK Electricity Company)": { + "location": "CN-HK", + "carbonIntensity": 710 + } + }, + "Saudi Arabia": { + "Any": { + "location": "SA", + "carbonIntensity": 505.9 + } + }, + "Israel": { + "Any": { + "location": "IL", + "carbonIntensity": 558 + } + }, + "United Arab Emirates": { + "Any": { + "location": "AE", + "carbonIntensity": 417.89 + } + }, + "Japan": { + "Any": { + "location": "JP", + "carbonIntensity": 465.8 + } + }, + "Turkey": { + "Any": { + "location": "TR", + "carbonIntensity": 375 + } + }, + "Indonesia": { + "Any": { + "location": "ID", + "carbonIntensity": 717.7 + } + }, + "Thailand": { + "Any": { + "location": "TH", + "carbonIntensity": 481 + } + }, + "Korea": { + "Any": { + "location": "KR", + "carbonIntensity": 415.6 + } + } + }, + "World": { + "Any": { + "Any": { + "location": "WORLD", + "carbonIntensity": 475 + } + } + }, + "Africa": { + "South Africa": { + "Any": { + "location": "ZA", + "carbonIntensity": 900.6 + } + } + } + }, + "providers_withoutDC": [ + "aws" + ], + "datacenters_dict_byProvider": { + "azure": { + "South Africa North": { + "Name": "South Africa North", + "name_unique": "azure / South Africa North", + "location": "ZA", + "PUE": null + }, + "South Africa West": { + "Name": "South Africa West", + "name_unique": "azure / South Africa West", + "location": "ZA", + "PUE": null + }, + "East Asia": { + "Name": "East Asia", + "name_unique": "azure / East Asia", + "location": "CN-HK", + "PUE": null + }, + "Southeast Asia": { + "Name": "Southeast Asia", + "name_unique": "azure / Southeast Asia", + "location": "SG", + "PUE": null + }, + "Australia Central": { + "Name": "Australia Central", + "name_unique": "azure / Australia Central", + "location": "AU-ACT", + "PUE": null + }, + "Australia Central 2": { + "Name": "Australia Central 2", + "name_unique": "azure / Australia Central 2", + "location": "AU-ACT", + "PUE": null + }, + "Australia East": { + "Name": "Australia East", + "name_unique": "azure / Australia East", + "location": "AU-NSW", + "PUE": null + }, + "Australia Southeast": { + "Name": "Australia Southeast", + "name_unique": "azure / Australia Southeast", + "location": "AU-VIC", + "PUE": null + }, + "Brazil South": { + "Name": "Brazil South", + "name_unique": "azure / Brazil South", + "location": "BR", + "PUE": null + }, + "Canada Central": { + "Name": "Canada Central", + "name_unique": "azure / Canada Central", + "location": "CA-ON", + "PUE": null + }, + "Canada East": { + "Name": "Canada East", + "name_unique": "azure / Canada East", + "location": "CA-QC", + "PUE": null + }, + "China East": { + "Name": "China East", + "name_unique": "azure / China East", + "location": "CN", + "PUE": null + }, + "China East 2": { + "Name": "China East 2", + "name_unique": "azure / China East 2", + "location": "CN", + "PUE": null + }, + "China North": { + "Name": "China North", + "name_unique": "azure / China North", + "location": "CN", + "PUE": null + }, + "China North 2": { + "Name": "China North 2", + "name_unique": "azure / China North 2", + "location": "CN", + "PUE": null + }, + "North Europe": { + "Name": "North Europe", + "name_unique": "azure / North Europe", + "location": "IE", + "PUE": null + }, + "West Europe": { + 
"Name": "West Europe", + "name_unique": "azure / West Europe", + "location": "NL", + "PUE": null + }, + "France Central": { + "Name": "France Central", + "name_unique": "azure / France Central", + "location": "FR", + "PUE": null + }, + "France South": { + "Name": "France South", + "name_unique": "azure / France South", + "location": "FR", + "PUE": null + }, + "Germany Central (Sovereign)": { + "Name": "Germany Central (Sovereign)", + "name_unique": "azure / Germany Central (Sovereign)", + "location": "DE", + "PUE": null + }, + "Germany North (Public)": { + "Name": "Germany North (Public)", + "name_unique": "azure / Germany North (Public)", + "location": "DE", + "PUE": null + }, + "Germany Northeastt (Sovereign)": { + "Name": "Germany Northeastt (Sovereign)", + "name_unique": "azure / Germany Northeastt (Sovereign)", + "location": "DE", + "PUE": null + }, + "Germany West Central (Public)": { + "Name": "Germany West Central (Public)", + "name_unique": "azure / Germany West Central (Public)", + "location": "DE", + "PUE": null + }, + "Central India": { + "Name": "Central India", + "name_unique": "azure / Central India", + "location": "IN", + "PUE": null + }, + "South India": { + "Name": "South India", + "name_unique": "azure / South India", + "location": "IN", + "PUE": null + }, + "West India": { + "Name": "West India", + "name_unique": "azure / West India", + "location": "IN", + "PUE": null + }, + "Israel Central": { + "Name": "Israel Central", + "name_unique": "azure / Israel Central", + "location": "IL", + "PUE": null + }, + "Italy North": { + "Name": "Italy North", + "name_unique": "azure / Italy North", + "location": "IT", + "PUE": null + }, + "Japan East": { + "Name": "Japan East", + "name_unique": "azure / Japan East", + "location": "JP", + "PUE": null + }, + "Japan West": { + "Name": "Japan West", + "name_unique": "azure / Japan West", + "location": "JP", + "PUE": null + }, + "Korea Central": { + "Name": "Korea Central", + "name_unique": "azure / Korea Central", + "location": "KR", + "PUE": null + }, + "Korea South": { + "Name": "Korea South", + "name_unique": "azure / Korea South", + "location": "KR", + "PUE": null + }, + "Mexico Central": { + "Name": "Mexico Central", + "name_unique": "azure / Mexico Central", + "location": "MX", + "PUE": null + }, + "New Zealand North": { + "Name": "New Zealand North", + "name_unique": "azure / New Zealand North", + "location": "NZ", + "PUE": null + }, + "Norway East": { + "Name": "Norway East", + "name_unique": "azure / Norway East", + "location": "NO", + "PUE": null + }, + "Norway West": { + "Name": "Norway West", + "name_unique": "azure / Norway West", + "location": "NO", + "PUE": null + }, + "Poland Central": { + "Name": "Poland Central", + "name_unique": "azure / Poland Central", + "location": "PL", + "PUE": null + }, + "Spain Central": { + "Name": "Spain Central", + "name_unique": "azure / Spain Central", + "location": "ES", + "PUE": null + }, + "Switzerland North": { + "Name": "Switzerland North", + "name_unique": "azure / Switzerland North", + "location": "CH", + "PUE": null + }, + "Switzerland West": { + "Name": "Switzerland West", + "name_unique": "azure / Switzerland West", + "location": "CH", + "PUE": null + }, + "UAE Central": { + "Name": "UAE Central", + "name_unique": "azure / UAE Central", + "location": "AE", + "PUE": null + }, + "UAE North": { + "Name": "UAE North", + "name_unique": "azure / UAE North", + "location": "AE", + "PUE": null + }, + "UK South": { + "Name": "UK South", + "name_unique": "azure / UK South", + "location": 
"GB", + "PUE": null + }, + "UK West": { + "Name": "UK West", + "name_unique": "azure / UK West", + "location": "GB", + "PUE": null + }, + "Central US": { + "Name": "Central US", + "name_unique": "azure / Central US", + "location": "US-IA", + "PUE": null + }, + "East US": { + "Name": "East US", + "name_unique": "azure / East US", + "location": "US-VA", + "PUE": null + }, + "East US 2": { + "Name": "East US 2", + "name_unique": "azure / East US 2", + "location": "US-VA", + "PUE": null + }, + "North Central US": { + "Name": "North Central US", + "name_unique": "azure / North Central US", + "location": "US-IL", + "PUE": null + }, + "South Central US": { + "Name": "South Central US", + "name_unique": "azure / South Central US", + "location": "US-TX", + "PUE": null + }, + "West Central US": { + "Name": "West Central US", + "name_unique": "azure / West Central US", + "location": "US-WY", + "PUE": null + }, + "West US": { + "Name": "West US", + "name_unique": "azure / West US", + "location": "US-CA", + "PUE": null + }, + "West US 2": { + "Name": "West US 2", + "name_unique": "azure / West US 2", + "location": "US-WA", + "PUE": null + }, + "West US 3": { + "Name": "West US 3", + "name_unique": "azure / West US 3", + "location": "US-AZ", + "PUE": null + } + }, + "gcp": { + "us-west1": { + "Name": "us-west1", + "name_unique": "gcp / us-west1", + "location": "US-OR", + "PUE": 1.11 + }, + "us-west2": { + "Name": "us-west2", + "name_unique": "gcp / us-west2", + "location": "US-CA", + "PUE": null + }, + "us-west3": { + "Name": "us-west3", + "name_unique": "gcp / us-west3", + "location": "US-UT", + "PUE": null + }, + "us-west4": { + "Name": "us-west4", + "name_unique": "gcp / us-west4", + "location": "US-NV", + "PUE": null + }, + "us-central1": { + "Name": "us-central1", + "name_unique": "gcp / us-central1", + "location": "US-IA", + "PUE": 1.11 + }, + "us-east1": { + "Name": "us-east1", + "name_unique": "gcp / us-east1", + "location": "US-SC", + "PUE": 1.11 + }, + "us-east4": { + "Name": "us-east4", + "name_unique": "gcp / us-east4", + "location": "US-VA", + "PUE": null + }, + "northamerica-northeast1": { + "Name": "northamerica-northeast1", + "name_unique": "gcp / northamerica-northeast1", + "location": "CA-QC", + "PUE": null + }, + "southamerica-east1": { + "Name": "southamerica-east1", + "name_unique": "gcp / southamerica-east1", + "location": "BR", + "PUE": null + }, + "europe-west1": { + "Name": "europe-west1", + "name_unique": "gcp / europe-west1", + "location": "BE", + "PUE": 1.08 + }, + "europe-west2": { + "Name": "europe-west2", + "name_unique": "gcp / europe-west2", + "location": "GB", + "PUE": null + }, + "europe-west3": { + "Name": "europe-west3", + "name_unique": "gcp / europe-west3", + "location": "DE", + "PUE": null + }, + "europe-west4": { + "Name": "europe-west4", + "name_unique": "gcp / europe-west4", + "location": "NL", + "PUE": null + }, + "europe-west6": { + "Name": "europe-west6", + "name_unique": "gcp / europe-west6", + "location": "CH", + "PUE": null + }, + "europe-north1": { + "Name": "europe-north1", + "name_unique": "gcp / europe-north1", + "location": "FI", + "PUE": 1.09 + }, + "asia-south1": { + "Name": "asia-south1", + "name_unique": "gcp / asia-south1", + "location": "IN", + "PUE": null + }, + "asia-southeast1": { + "Name": "asia-southeast1", + "name_unique": "gcp / asia-southeast1", + "location": "SG", + "PUE": 1.15 + }, + "asia-southeast2": { + "Name": "asia-southeast2", + "name_unique": "gcp / asia-southeast2", + "location": "ID", + "PUE": null + }, + "asia-east2": { + 
"Name": "asia-east2", + "name_unique": "gcp / asia-east2", + "location": "CN-HK", + "PUE": null + }, + "asia-east1": { + "Name": "asia-east1", + "name_unique": "gcp / asia-east1", + "location": "TW", + "PUE": 1.13 + }, + "asia-northeast1": { + "Name": "asia-northeast1", + "name_unique": "gcp / asia-northeast1", + "location": "JP", + "PUE": null + }, + "asia-northeast2": { + "Name": "asia-northeast2", + "name_unique": "gcp / asia-northeast2", + "location": "JP", + "PUE": null + }, + "asia-northeast3": { + "Name": "asia-northeast3", + "name_unique": "gcp / asia-northeast3", + "location": "KR", + "PUE": null + }, + "australia-southeast1": { + "Name": "australia-southeast1", + "name_unique": "gcp / australia-southeast1", + "location": "AU-NSW", + "PUE": null + } + } + }, + "datacenters_dict_byName": { + "gcp / us-west1": { + "provider": "gcp", + "Name": "us-west1", + "name_unique": "gcp / us-west1", + "location": "US-OR", + "PUE": 1.11 + }, + "gcp / us-west2": { + "provider": "gcp", + "Name": "us-west2", + "name_unique": "gcp / us-west2", + "location": "US-CA", + "PUE": null + }, + "gcp / us-west3": { + "provider": "gcp", + "Name": "us-west3", + "name_unique": "gcp / us-west3", + "location": "US-UT", + "PUE": null + }, + "gcp / us-west4": { + "provider": "gcp", + "Name": "us-west4", + "name_unique": "gcp / us-west4", + "location": "US-NV", + "PUE": null + }, + "gcp / us-central1": { + "provider": "gcp", + "Name": "us-central1", + "name_unique": "gcp / us-central1", + "location": "US-IA", + "PUE": 1.11 + }, + "gcp / us-east1": { + "provider": "gcp", + "Name": "us-east1", + "name_unique": "gcp / us-east1", + "location": "US-SC", + "PUE": 1.11 + }, + "gcp / us-east4": { + "provider": "gcp", + "Name": "us-east4", + "name_unique": "gcp / us-east4", + "location": "US-VA", + "PUE": null + }, + "gcp / northamerica-northeast1": { + "provider": "gcp", + "Name": "northamerica-northeast1", + "name_unique": "gcp / northamerica-northeast1", + "location": "CA-QC", + "PUE": null + }, + "gcp / southamerica-east1": { + "provider": "gcp", + "Name": "southamerica-east1", + "name_unique": "gcp / southamerica-east1", + "location": "BR", + "PUE": null + }, + "gcp / europe-west1": { + "provider": "gcp", + "Name": "europe-west1", + "name_unique": "gcp / europe-west1", + "location": "BE", + "PUE": 1.08 + }, + "gcp / europe-west2": { + "provider": "gcp", + "Name": "europe-west2", + "name_unique": "gcp / europe-west2", + "location": "GB", + "PUE": null + }, + "gcp / europe-west3": { + "provider": "gcp", + "Name": "europe-west3", + "name_unique": "gcp / europe-west3", + "location": "DE", + "PUE": null + }, + "gcp / europe-west4": { + "provider": "gcp", + "Name": "europe-west4", + "name_unique": "gcp / europe-west4", + "location": "NL", + "PUE": null + }, + "gcp / europe-west6": { + "provider": "gcp", + "Name": "europe-west6", + "name_unique": "gcp / europe-west6", + "location": "CH", + "PUE": null + }, + "gcp / europe-north1": { + "provider": "gcp", + "Name": "europe-north1", + "name_unique": "gcp / europe-north1", + "location": "FI", + "PUE": 1.09 + }, + "gcp / asia-south1": { + "provider": "gcp", + "Name": "asia-south1", + "name_unique": "gcp / asia-south1", + "location": "IN", + "PUE": null + }, + "gcp / asia-southeast1": { + "provider": "gcp", + "Name": "asia-southeast1", + "name_unique": "gcp / asia-southeast1", + "location": "SG", + "PUE": 1.15 + }, + "gcp / asia-southeast2": { + "provider": "gcp", + "Name": "asia-southeast2", + "name_unique": "gcp / asia-southeast2", + "location": "ID", + "PUE": null + }, + "gcp / 
asia-east2": { + "provider": "gcp", + "Name": "asia-east2", + "name_unique": "gcp / asia-east2", + "location": "CN-HK", + "PUE": null + }, + "gcp / asia-east1": { + "provider": "gcp", + "Name": "asia-east1", + "name_unique": "gcp / asia-east1", + "location": "TW", + "PUE": 1.13 + }, + "gcp / asia-northeast1": { + "provider": "gcp", + "Name": "asia-northeast1", + "name_unique": "gcp / asia-northeast1", + "location": "JP", + "PUE": null + }, + "gcp / asia-northeast2": { + "provider": "gcp", + "Name": "asia-northeast2", + "name_unique": "gcp / asia-northeast2", + "location": "JP", + "PUE": null + }, + "gcp / asia-northeast3": { + "provider": "gcp", + "Name": "asia-northeast3", + "name_unique": "gcp / asia-northeast3", + "location": "KR", + "PUE": null + }, + "gcp / australia-southeast1": { + "provider": "gcp", + "Name": "australia-southeast1", + "name_unique": "gcp / australia-southeast1", + "location": "AU-NSW", + "PUE": null + }, + "azure / South Africa North": { + "provider": "azure", + "Name": "South Africa North", + "name_unique": "azure / South Africa North", + "location": "ZA", + "PUE": null + }, + "azure / South Africa West": { + "provider": "azure", + "Name": "South Africa West", + "name_unique": "azure / South Africa West", + "location": "ZA", + "PUE": null + }, + "azure / East Asia": { + "provider": "azure", + "Name": "East Asia", + "name_unique": "azure / East Asia", + "location": "CN-HK", + "PUE": null + }, + "azure / Southeast Asia": { + "provider": "azure", + "Name": "Southeast Asia", + "name_unique": "azure / Southeast Asia", + "location": "SG", + "PUE": null + }, + "azure / Australia Central": { + "provider": "azure", + "Name": "Australia Central", + "name_unique": "azure / Australia Central", + "location": "AU-ACT", + "PUE": null + }, + "azure / Australia Central 2": { + "provider": "azure", + "Name": "Australia Central 2", + "name_unique": "azure / Australia Central 2", + "location": "AU-ACT", + "PUE": null + }, + "azure / Australia East": { + "provider": "azure", + "Name": "Australia East", + "name_unique": "azure / Australia East", + "location": "AU-NSW", + "PUE": null + }, + "azure / Australia Southeast": { + "provider": "azure", + "Name": "Australia Southeast", + "name_unique": "azure / Australia Southeast", + "location": "AU-VIC", + "PUE": null + }, + "azure / Brazil South": { + "provider": "azure", + "Name": "Brazil South", + "name_unique": "azure / Brazil South", + "location": "BR", + "PUE": null + }, + "azure / Canada Central": { + "provider": "azure", + "Name": "Canada Central", + "name_unique": "azure / Canada Central", + "location": "CA-ON", + "PUE": null + }, + "azure / Canada East": { + "provider": "azure", + "Name": "Canada East", + "name_unique": "azure / Canada East", + "location": "CA-QC", + "PUE": null + }, + "azure / China East": { + "provider": "azure", + "Name": "China East", + "name_unique": "azure / China East", + "location": "CN", + "PUE": null + }, + "azure / China East 2": { + "provider": "azure", + "Name": "China East 2", + "name_unique": "azure / China East 2", + "location": "CN", + "PUE": null + }, + "azure / China North": { + "provider": "azure", + "Name": "China North", + "name_unique": "azure / China North", + "location": "CN", + "PUE": null + }, + "azure / China North 2": { + "provider": "azure", + "Name": "China North 2", + "name_unique": "azure / China North 2", + "location": "CN", + "PUE": null + }, + "azure / North Europe": { + "provider": "azure", + "Name": "North Europe", + "name_unique": "azure / North Europe", + "location": "IE", + 
"PUE": null + }, + "azure / West Europe": { + "provider": "azure", + "Name": "West Europe", + "name_unique": "azure / West Europe", + "location": "NL", + "PUE": null + }, + "azure / France Central": { + "provider": "azure", + "Name": "France Central", + "name_unique": "azure / France Central", + "location": "FR", + "PUE": null + }, + "azure / France South": { + "provider": "azure", + "Name": "France South", + "name_unique": "azure / France South", + "location": "FR", + "PUE": null + }, + "azure / Germany Central (Sovereign)": { + "provider": "azure", + "Name": "Germany Central (Sovereign)", + "name_unique": "azure / Germany Central (Sovereign)", + "location": "DE", + "PUE": null + }, + "azure / Germany North (Public)": { + "provider": "azure", + "Name": "Germany North (Public)", + "name_unique": "azure / Germany North (Public)", + "location": "DE", + "PUE": null + }, + "azure / Germany Northeastt (Sovereign)": { + "provider": "azure", + "Name": "Germany Northeastt (Sovereign)", + "name_unique": "azure / Germany Northeastt (Sovereign)", + "location": "DE", + "PUE": null + }, + "azure / Germany West Central (Public)": { + "provider": "azure", + "Name": "Germany West Central (Public)", + "name_unique": "azure / Germany West Central (Public)", + "location": "DE", + "PUE": null + }, + "azure / Central India": { + "provider": "azure", + "Name": "Central India", + "name_unique": "azure / Central India", + "location": "IN", + "PUE": null + }, + "azure / South India": { + "provider": "azure", + "Name": "South India", + "name_unique": "azure / South India", + "location": "IN", + "PUE": null + }, + "azure / West India": { + "provider": "azure", + "Name": "West India", + "name_unique": "azure / West India", + "location": "IN", + "PUE": null + }, + "azure / Israel Central": { + "provider": "azure", + "Name": "Israel Central", + "name_unique": "azure / Israel Central", + "location": "IL", + "PUE": null + }, + "azure / Italy North": { + "provider": "azure", + "Name": "Italy North", + "name_unique": "azure / Italy North", + "location": "IT", + "PUE": null + }, + "azure / Japan East": { + "provider": "azure", + "Name": "Japan East", + "name_unique": "azure / Japan East", + "location": "JP", + "PUE": null + }, + "azure / Japan West": { + "provider": "azure", + "Name": "Japan West", + "name_unique": "azure / Japan West", + "location": "JP", + "PUE": null + }, + "azure / Korea Central": { + "provider": "azure", + "Name": "Korea Central", + "name_unique": "azure / Korea Central", + "location": "KR", + "PUE": null + }, + "azure / Korea South": { + "provider": "azure", + "Name": "Korea South", + "name_unique": "azure / Korea South", + "location": "KR", + "PUE": null + }, + "azure / Mexico Central": { + "provider": "azure", + "Name": "Mexico Central", + "name_unique": "azure / Mexico Central", + "location": "MX", + "PUE": null + }, + "azure / New Zealand North": { + "provider": "azure", + "Name": "New Zealand North", + "name_unique": "azure / New Zealand North", + "location": "NZ", + "PUE": null + }, + "azure / Norway East": { + "provider": "azure", + "Name": "Norway East", + "name_unique": "azure / Norway East", + "location": "NO", + "PUE": null + }, + "azure / Norway West": { + "provider": "azure", + "Name": "Norway West", + "name_unique": "azure / Norway West", + "location": "NO", + "PUE": null + }, + "azure / Poland Central": { + "provider": "azure", + "Name": "Poland Central", + "name_unique": "azure / Poland Central", + "location": "PL", + "PUE": null + }, + "azure / Spain Central": { + "provider": "azure", 
+ "Name": "Spain Central", + "name_unique": "azure / Spain Central", + "location": "ES", + "PUE": null + }, + "azure / Switzerland North": { + "provider": "azure", + "Name": "Switzerland North", + "name_unique": "azure / Switzerland North", + "location": "CH", + "PUE": null + }, + "azure / Switzerland West": { + "provider": "azure", + "Name": "Switzerland West", + "name_unique": "azure / Switzerland West", + "location": "CH", + "PUE": null + }, + "azure / UAE Central": { + "provider": "azure", + "Name": "UAE Central", + "name_unique": "azure / UAE Central", + "location": "AE", + "PUE": null + }, + "azure / UAE North": { + "provider": "azure", + "Name": "UAE North", + "name_unique": "azure / UAE North", + "location": "AE", + "PUE": null + }, + "azure / UK South": { + "provider": "azure", + "Name": "UK South", + "name_unique": "azure / UK South", + "location": "GB", + "PUE": null + }, + "azure / UK West": { + "provider": "azure", + "Name": "UK West", + "name_unique": "azure / UK West", + "location": "GB", + "PUE": null + }, + "azure / Central US": { + "provider": "azure", + "Name": "Central US", + "name_unique": "azure / Central US", + "location": "US-IA", + "PUE": null + }, + "azure / East US": { + "provider": "azure", + "Name": "East US", + "name_unique": "azure / East US", + "location": "US-VA", + "PUE": null + }, + "azure / East US 2": { + "provider": "azure", + "Name": "East US 2", + "name_unique": "azure / East US 2", + "location": "US-VA", + "PUE": null + }, + "azure / North Central US": { + "provider": "azure", + "Name": "North Central US", + "name_unique": "azure / North Central US", + "location": "US-IL", + "PUE": null + }, + "azure / South Central US": { + "provider": "azure", + "Name": "South Central US", + "name_unique": "azure / South Central US", + "location": "US-TX", + "PUE": null + }, + "azure / West Central US": { + "provider": "azure", + "Name": "West Central US", + "name_unique": "azure / West Central US", + "location": "US-WY", + "PUE": null + }, + "azure / West US": { + "provider": "azure", + "Name": "West US", + "name_unique": "azure / West US", + "location": "US-CA", + "PUE": null + }, + "azure / West US 2": { + "provider": "azure", + "Name": "West US 2", + "name_unique": "azure / West US 2", + "location": "US-WA", + "PUE": null + }, + "azure / West US 3": { + "provider": "azure", + "Name": "West US 3", + "name_unique": "azure / West US 3", + "location": "US-AZ", + "PUE": null + } + }, + "providersTypes": { + "cloudComputing": "Cloud computing" + }, + "platformName_byType": { + "cloudComputing": { + "gcp": "Google Cloud Platform", + "aws": "Amazon Web Services", + "azure": "Azure" + } + }, + "refValues_dict": { + "memoryPower": 0.3725, + "passengerCar_EU_perkm": 175, + "passengerCar_US_perkm": 251, + "train_perkm": 41, + "flight_economy_perkm": 171, + "treeYear": 11000, + "flight_NY-SF": 570000, + "flight_PAR-LON": 50000, + "flight_NYC-MEL": 2310000, + "streaming_netflix_perhour": 36, + "google_search": 10, + "tree_month": 917 + } + } + }, + { + "id": "coreType_dropdown", + "property": "value", + "value": "CPU" + }, + { + "id": "numberCPUs_input", + "property": "value", + "value": 12 + }, + { + "id": "CPUmodel_dropdown", + "property": "value", + "value": "Xeon E5-2683 v4" + }, + { + "id": "tdpCPU_div", + "property": "style", + "value": { + "display": "none" + } + }, + { + "id": "tdpCPU_input", + "property": "value", + "value": 12 + }, + { + "id": "numberGPUs_input", + "property": "value", + "value": 1 + }, + { + "id": "GPUmodel_dropdown", + "property": "value", + 
"value": "NVIDIA Tesla V100" + }, + { + "id": "tdpGPU_div", + "property": "style", + "value": { + "display": "none" + } + }, + { + "id": "tdpGPU_input", + "property": "value", + "value": 200 + }, + { + "id": "memory_input", + "property": "value", + "value": 64 + }, + { + "id": "runTime_hour_input", + "property": "value", + "value": 13 + }, + { + "id": "runTime_min_input", + "property": "value", + "value": 0 + }, + { + "id": "location_continent_dropdown", + "property": "value", + "value": "Europe" + }, + { + "id": "location_country_dropdown", + "property": "value", + "value": "Austria" + }, + { + "id": "location_region_dropdown", + "property": "value", + "value": "AT" + }, + { + "id": "server_continent_dropdown", + "property": "value", + "value": "Europe" + }, + { + "id": "server_dropdown", + "property": "value", + "value": "gcp / europe-north1" + }, + { + "id": "location_div", + "property": "style", + "value": { + "display": "flex" + } + }, + { + "id": "server_div", + "property": "style", + "value": { + "display": "none" + } + }, + { + "id": "usageCPU_radio", + "property": "value", + "value": "No" + }, + { + "id": "usageCPU_input", + "property": "value", + "value": 1 + }, + { + "id": "usageGPU_radio", + "property": "value", + "value": "No" + }, + { + "id": "usageGPU_input", + "property": "value", + "value": 1 + }, + { + "id": "PUEquestion_div", + "property": "style", + "value": { + "display": "flex" + } + }, + { + "id": "pue_radio", + "property": "value", + "value": "No" + }, + { + "id": "PUE_input", + "property": "value", + "value": 1.67 + }, + { + "id": "PSF_radio", + "property": "value", + "value": "No" + }, + { + "id": "PSF_input", + "property": "value", + "value": 1 + }, + { + "id": "platformType_dropdown", + "property": "value", + "value": "localServer" + }, + { + "id": "provider_dropdown", + "property": "value", + "value": "gcp" + }, + { + "id": "provider_dropdown_div", + "property": "style", + "value": { + "display": "none" + } + } + ], + "state": [ + { + "id": "aggregate_data", + "property": "data", + "value": { + "coreType": "CPU", + "CPUmodel": "Xeon E5-2683 v4", + "n_CPUcores": 12, + "CPUpower": 7.5, + "GPUmodel": "NVIDIA Tesla V100", + "n_GPUs": 1, + "GPUpower": 0, + "memory": 64, + "runTime_hours": 12, + "runTime_min": 0, + "runTime": 12, + "location": "AT", + "carbonIntensity": 111.18, + "PUE": 1.67, + "PSF": 1, + "selected_platform": "localServer", + "carbonEmissions": 253.64089324800003, + "CE_CPU": 200.524248, + "CE_GPU": 0, + "CE_core": 200.524248, + "CE_memory": 53.116645248, + "energy_needed": 2.2813536, + "power_needed": 190.1128, + "n_treeMonths": 0.2766991562705455, + "nkm_drivingUS": 1.0105214870438248, + "nkm_drivingEU": 1.4493765328457144, + "nkm_train": 6.18636324995122, + "flying_context": 0.005072817864960001, + "flying_text": "Paris-London", + "text_CE": "253.64 g CO2e", + "text_energyNeeded": "2.28 kWh", + "text_treeYear": "0.28 tree-months", + "permalink": "http://calculator.green-algorithms.org//?runTime_hour=12&runTime_min=0&appVersion=v2.2&locationContinent=Europe&locationCountry=Austria&locationRegion=AT&coreType=CPU&numberCPUs=12&CPUmodel=Xeon%20E5-2683%20v4&memory=64&platformType=localServer" + } + } + ] + } diff --git a/README.md b/README.md new file mode 100644 index 0000000..e8e3a0d --- /dev/null +++ b/README.md @@ -0,0 +1,181 @@ +# Experimental protocol + +By means of experiments, we compare a set of energy consumption evaluation tools and methods on different ML computing tasks: the training or fine-tuning of ML models for computer vision and 
Natural Language Processing (NLP).
+
+In the different ML contexts, we observe the relative energy consumption evaluations provided by these tools and methods (also compared to an external power meter), as they belong to different approaches: on-chip sensors, a mix of on-chip sensors and an analytical estimation model, and two different types of analytical estimation models.
+
+We have chosen 4 different ML computing tasks:
+- **Training an image classifier on the MNIST dataset.** Our reference training script is the PyTorch example ``Basic MNIST Example'' ([https://pytorch.org/examples/](https://pytorch.org/examples/)), for image classification using ConvNets, available on GitHub in the repository [pytorch/examples/tree/main/mnist](https://github.com/pytorch/examples/tree/main/mnist).
+
+- **Training an image classifier on the CIFAR10 dataset.** Our reference training script is the PyTorch tutorial ``Training a classifier'', part of ``Deep Learning with PyTorch: A 60 Minute Blitz,'' available on the PyTorch website at [tutorials/beginner/blitz/](https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html).
+
+- **Training ResNet18 on the ImageNet dataset.** Our reference training script is the recipe for training ResNet18 on ImageNet, provided by PyTorch. The corresponding code is available in the repository [pytorch/vision/references/classification/](https://github.com/pytorch/vision/tree/732551701c326b8338887a3812d189c845ff28a5/references/classification).
+
+- **Fine-tuning Bert-base on the SQUAD-v1-1 dataset.** Our reference training script is the recipe for fine-tuning Bert-base (uncased) on the SQUAD-v1-1 dataset, provided by google-research. It is available on GitHub in the repository [google-research/bert/](https://github.com/google-research/bert).
+
+
+## 1. Installation
+
+Clone this repository and create a folder ``data`` within the cloned repository (the datasets for MNIST and CIFAR10 will be automatically downloaded into this folder when running the experiments).
+
+Follow the requirements detailed in the next three sections.
+
+Instructions to download and prepare the datasets ImageNet, CUB_200_2011 and SQUAD-v1-1 are provided in the corresponding readme files:
+- ``exp-3-resnet18/README-ResNet18.md`` (ImageNet, CUB_200_2011)
+- ``exp-4-bert-squad/README-Bert-SQUAD.md`` (SQUAD-v1-1)
+
+## 2. Requirements for operating system and hardware
+
+A Linux OS is needed to run these tests. This is mainly because the experiments are launched automatically through the shell script ``experiment.sh``. Moreover, the evaluation tool Carbon Tracker is only compatible with Linux.
+
+These tests were run on a desktop computer with an Intel i9-9900K CPU and two Nvidia GeForce RTX 2080 Super GPUs (though only one GPU was used during training).
+
+**Additional hardware used:**
+For external measurements, we use the smart plug Tapo P110 from TP-Link.
+
+## 3. Python version and Python packages
+
+### 3.1. Programming language
+Developed and tested using Python 3.10 and Python 3.7.
+
+### 3.2. Creating the virtual environment
+Create and activate a virtual environment:
+- with pip:
+```Shellsession
+demo> python -m venv venv_name # create the virtual environment
+demo> source venv_name/bin/activate # activate the virtual environment (linux, mac)
+```
+Another way of creating the virtual environment is `virtualenv venv_name`.
+- with conda:
+```Shellsession
+demo> conda create --name venv_name # create the virtual environment
+demo> conda activate venv_name # activate the virtual environment
+```
+Source: https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html.
+
+### 3.3. Packages
+
+**For the vision experiments:**
+For experiments 1, 2 and 3, we need to install the packages torch and torchvision for CPU+GPU (i.e., with CUDA). We have used a pip virtual environment with Python 3.10.12 for these experiments.
+
+*PyTorch:*
+Depending on your OS, use the installation commands provided at https://pytorch.org/. For instance, we have used (for Linux+pip+CUDA):
+```Shellsession
+demo> pip3 install torch torchvision --index-url https://download.pytorch.org/whl/cu116
+```
+
+**For the NLP experiments:**
+For experiment 4, we need to install TensorFlow. We have used a conda virtual environment with Python 3.7.16 for this experiment.
+
+*TensorFlow:*
+We have installed the following packages: tensorflow, tf_slim. To install tensorflow, we used pip, following the instructions at https://www.tensorflow.org/install/pip. Our installation process for TensorFlow is described in detail in the file ``exp-4-bert-squad/README-Bert-SQUAD.md``; however, one may now simply use the command
+```Shellsession
+demo> pip install tensorflow[and-cuda]
+```
+
+*Modification of two TensorFlow files:*
+Then, the files 'estimator.py' and 'tpu_estimator.py' in the created virtual environment should be replaced, respectively, with the files
+- ./exp-4-bert-squad/tf_updated_files/estimator.py
+- ./exp-4-bert-squad/tf_updated_files/tpu_estimator.py.
+
+The files 'estimator.py' and 'tpu_estimator.py' are located in the conda environment folder, in the following respective folders:
+- /lib/python3.7/site-packages/tensorflow_estimator/python/estimator/
+- /lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/.
+
+*Modification of the Carbon Tracker package:*
+
+In the file ``/lib/python3.7/site-packages/carbontracker/components/gpu/nvidia.py``, the line
+```Python
+devices = [name.decode("utf-8") for name in names]
+```
+should be replaced with
+```Python
+devices = [name for name in names]
+```
+
+**For all experiments:**
+
+*Specific to the Tapo P110:*
+We need to install a package to communicate with the smart plug Tapo P110:
+```Shellsession
+demo> pip install --force-reinstall git+https://github.com/almottier/TapoP100.git@main
+```
+Previously we were using the original package ``PyP100`` (installed with ``pip install PyP100``), but the current firmware update of the Tapo is not compatible with the current version (0.1.2) of this package. The version of PyP100 is now 0.1.4.
+
+*Other packages:*
+Finally, we need to install the following packages (pip install ...):
+- carbontracker, codecarbon, eco2ai
+- GPUtil
+- matplotlib
+- numpy
+- requests
+- tqdm
+- pandas
+- thop (for the FLOPS method)
+
+Detailed versions of the packages used in our work can be found in the file ``./requirements/requirements.txt``.
+
+## 4. Providing rights for energy files of the CPU
+
+We need administrator rights (sudo) to make the CPU energy files readable by several energy evaluation tools.
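+
+These files are the cumulative energy counters exposed by the Intel RAPL interface under ``/sys/class/powercap/``; on recent Linux kernels they are readable by root only, which is why the commands below adjust their permissions. Once read access has been granted, the counter can be checked directly, for instance with the following minimal sketch (a hypothetical helper, not part of this repository), which polls the package-0 counter twice and derives an average CPU package power, in essence what tools such as Carbon Tracker and CodeCarbon do for the CPU:
+```Python
+# Hypothetical sanity check (not part of this repository): read the RAPL
+# package-0 energy counter twice and derive an average CPU package power.
+import time
+
+RAPL_FILE = "/sys/class/powercap/intel-rapl:0/energy_uj"  # cumulative energy in microjoules
+
+def read_uj():
+    with open(RAPL_FILE) as f:
+        return int(f.read())
+
+e0, t0 = read_uj(), time.time()
+time.sleep(2)
+e1, t1 = read_uj(), time.time()
+# The counter wraps around periodically; a negative delta simply means a wrap occurred.
+print(f"Average CPU package power: {(e1 - e0) / 1e6 / (t1 - t0):.1f} W")
+```
+If this read fails with a permission error, apply one of the commands below.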
+ +*For Carbon-Tracker:* + +```Shellsession +demo> sudo chmod o+r /sys/class/powercap/intel-rapl\:0/energy_uj +``` + +*For Code-Carbon:* + +With reboot needed: +```Shellsession +demo> sudo apt install sysfsutils +``` +Add this line in /etc/sysfs.conf : ``mode class/powercap/intel-rapl:0/energy_uj = 0444``. Then, reboot. + +Without reboot: +```Shellsession +demo> sudo chmod -R a+r /sys/class/powercap/intel-rapl +``` +However, this change will be lost at next boot. + +(Source: https://github.com/mlco2/codecarbon/issues/244) + +## 5. Usage + +Run the file ``experiment.sh``: +```Shellsession +demo> ./experiment.sh +``` + +A new log folder will be created with the results of the experiments. + +One can change parameters in the paragraph "SET BY THE USER" at the top of the ``experiment.sh`` file. These parameters include: + +**Data paths:** +- BERT_BASE_DIR: path to the **contents** of the folder ``/uncased_L-12_H-768_A-12`` +- SQUAD_DIR: path to the folder ``/uncased_L-12_H-768_A-12`` +- IMAGE_NET_DIR: path to the folder ``/imagenet`` + +**List of evaluation tools and methods:** +- 'code_carbon:online' +- 'carbon_tracker:measure' +- 'carbon_tracker:predict' +- 'eco2ai' +- 'green_algorithms:default' +- 'green_algorithms:automated_parallel' +- 'tapo' + +**Usage of a GPU:** 'True' or 'False'. + +**Number of iterations for the experiments:** integer. + +**Choice of ML task:** +- 'idle': no task, corresponds to the idle state of the computer +- 'mnist': training an image classifier on MNIST +- 'cifar10': training an image classifier on CIFAR10 +- 'image_net': training ResNet18 on ImageNet +- 'SQUAD-v1-1': fine-tuning Bert-base on SQUAD-v1-1 +- 'CUB_200_2011': training ResNet18 on CUB_200_2011 (for tests) +- 'SQUAD-extracted': fine-tuning Bert-base on a subset of SQUAD-v1-1 (for tests) + diff --git a/TAPO.py b/TAPO.py new file mode 100644 index 0000000..3892648 --- /dev/null +++ b/TAPO.py @@ -0,0 +1,103 @@ +from PyP100 import PyP110 +import base64 +import json +import datetime +from time import time, sleep +from datetime import datetime, timedelta +import os +import argparse + +parser = argparse.ArgumentParser() +parser.add_argument("--path_logs_and_results", default="./other/tmp", type=str, help="dataset path") +args_parser = parser.parse_args() + + +with open('TAPO-VAR.json', 'r') as f: + d = json.load(f) +d['TAPORUN']=False +d['TAPOSAVED']=False +with open('TAPO-VAR.json', 'w') as f: + json.dump(d, f) + +with open('TAPO-credentials.json', 'r') as f: + cred = json.load(f) +plugip = cred["ip"] +username = cred["email"] +password = cred["password"] +p110 = PyP110.P110(plugip, username, password) + +p110.handshake() +p110.login() + +returnedData = p110.getDeviceInfo() +if 'result' not in returnedData.keys(): + returnedData = {'result' : returnedData} +nickname = base64.b64decode(returnedData['result']['nickname']) +nicknameDecoded = nickname.decode("utf-8") +print ("Plug Name:", nicknameDecoded) +print ("Device IP:", returnedData['result']['ip']) +print ("Device On:", returnedData['result']['device_on']) +print ("Device Model:", returnedData['result']['model']) +print ("Firmware Ver:", returnedData['result']['fw_ver']) +print ("Device ID:", returnedData['result']['device_id']) +print ("MAC:", returnedData['result']['mac']) +print ("Device On Time:", (timedelta(seconds=(returnedData['result']['on_time'])))) +print ("Device Overheated:", returnedData['result']['overheated']) +print ("Power Protection:", returnedData['result']['power_protection_status']) +print ("RSSI:", returnedData['result']['rssi']) 
+print ("Signal Level:", returnedData['result']['signal_level']) +print (" ") + +print("TAPO-READY") +d['TAPORUN']=True +with open('TAPO-VAR.json', 'w') as f: + d = json.dump(d,f) +TAPORUN = True + +data = {} +power_list = [] +time_list = [] +date_list = [] + +print('xxxxxxxxxxxxxxxxxxxxxxxx') +returnedData = {} +returnedData['result'] = p110.getEnergyUsage() +print(returnedData) +print('xxxxxxxxxxxxxxxxxxxxxxxx') + +while TAPORUN==True: + + time_s = time() + try: + returnedData = p110.getEnergyUsage() + if 'result' not in returnedData.keys(): + returnedData = {'result' : returnedData} + print(returnedData) + power_list.append(returnedData['result']['current_power'] / 1000) + time_list.append(time_s) + date_list.append(returnedData['result']['local_time']) + sleep(2) + except Exception as e: + print("Couldn't retrieve info this time: ", time_s, " error: ", e) + + with open('TAPO-VAR.json', 'r') as f: + d = json.load(f) + TAPORUN = d['TAPORUN'] + +data['power'] = power_list +data['time'] = time_list +data['date'] = date_list + +str_current_datetime = datetime.now().strftime("%Y%m%d-%H%M%S") +file_name = os.path.join(args_parser.path_logs_and_results,'tapo_logs', str_current_datetime+"_tapo.txt") +with open(file_name, 'w') as f: + json.dump(data, f, indent = 4, sort_keys=True) + + +with open('TAPO-VAR.json', 'r') as f: + d = json.load(f) +d['TAPOSAVED']=True +with open('TAPO-VAR.json', 'w') as f: + json.dump(d, f) + +print("END-TAPO") \ No newline at end of file diff --git a/UTIL.py b/UTIL.py new file mode 100644 index 0000000..9459f3e --- /dev/null +++ b/UTIL.py @@ -0,0 +1,69 @@ +import json +from datetime import datetime, timedelta +import os +import time +import psutil +import GPUtil +import argparse + +parser = argparse.ArgumentParser() +parser.add_argument("--path_logs_and_results", default="./other/tmp", type=str, help="dataset path") +args_parser = parser.parse_args() + + +# Initialise the dictionnary .. +with open('UTIL-VAR.json', 'r') as f: + d = json.load(f) +d['UTIL_RUN']=False +d['UTIL_SAVED']=False +with open('UTIL-VAR.json', 'w') as f: + json.dump(d, f) + +print('Dictionnary initialized') + +data = {} +time_list = [] +cpu_util = [] +gpu_util = [] +ram_util = [] + +# .. 
and say that we are ready to record +d['UTIL_RUN']=True +with open('UTIL-VAR.json', 'w') as f: + d = json.dump(d,f) +UTIL_RUN = True + +print("UTIL READY") +print('UTIL RUN: ', UTIL_RUN) + +while UTIL_RUN==True: + time_s = time.time() + time_list.append(time_s) + cpu_util.append(psutil.cpu_percent()) + gpu_util.append(GPUtil.getGPUs()[0].load) + ram_util.append(psutil.virtual_memory()[3]/10**9) + time.sleep(2) + + with open('UTIL-VAR.json', 'r') as f: + d = json.load(f) + UTIL_RUN = d['UTIL_RUN'] + +data['cpu_util'] = cpu_util +data['gpu_util'] = gpu_util +data['ram_util'] = ram_util +data['time'] = time_list + +# str_current_datetime = str(datetime.now()) +str_current_datetime = datetime.now().strftime("%Y%m%d-%H%M%S") +file_name = os.path.join(args_parser.path_logs_and_results,'util_logs', str_current_datetime+"_tool.txt") +with open(file_name, 'w') as f: + json.dump(data, f, indent = 4, sort_keys=True) + +# Indicate that we have finished saving the data +with open('UTIL-VAR.json', 'r') as f: + d = json.load(f) +d['UTIL_SAVED']=True +with open('UTIL-VAR.json', 'w') as f: + json.dump(d, f) + +print("END-UTIL") \ No newline at end of file diff --git a/creating_plots/plot_results.html b/creating_plots/plot_results.html new file mode 100644 index 0000000..894d2ad --- /dev/null +++ b/creating_plots/plot_results.html @@ -0,0 +1,15379 @@ + + + + + +plots_exp_survey + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/creating_plots/plot_results.ipynb b/creating_plots/plot_results.ipynb new file mode 100644 index 0000000..9ed6942 --- /dev/null +++ b/creating_plots/plot_results.ipynb @@ -0,0 +1,614 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "1092d4c2", + "metadata": {}, + "source": [ + "# Plots of experiments\n", + "\n", + "***\n", + "\n", + "**Import libraries:**" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "091231e5", + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import json\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import matplotlib\n", + "import os\n", + "import datetime\n", + "import copy\n", + "\n", + "dev = 'cuda'\n", + "meas = 'energy_consumed'\n", + "comp = 'linux_alienware'\n", + "ml = 'training'" + ] + }, + { + "cell_type": "markdown", + "id": "ccb5de13", + "metadata": {}, + "source": [ + "**Usefull functions and variables:**" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "30446017", + "metadata": {}, + "outputs": [], + "source": [ + "def def_fig_title(exp):\n", + " if exp == 'SQUAD-v1-1':\n", + " fig_title = 'Fine-tuning Bert-base on SQUAD'\n", + " elif exp == 'cifar10':\n", + " fig_title = 'Training on CIFAR10'\n", + " elif exp == 'mnist':\n", + " fig_title = 'Training on MNIST'\n", + " elif exp == 'idle':\n", + " fig_title = 'Idle during 10 minutes'\n", + " else:\n", + " fig_title = 'Training on ImageNet'\n", + " return fig_title\n", + "\n", + "def def_fig_y_axis(exp):\n", + " if exp == 'idle':\n", + " y_label = 'energy consumed (Wh)'\n", + " else:\n", + " y_label = 'energy consumed per epoch (Wh)'\n", + " return y_label\n", + "\n", + "tr = {'energy_consumed':'Energy cons. (kWh)',\\\n", + " 'co2_emissions': 'CO2 equiv. 
(kg)',\\\n", + " 'time':'Duration (s)',\\\n", + " 'cifar10' : 'CIFAR10',\\\n", + " 'mnist' : 'MNIST',\\\n", + " 'SQUAD-v1-1' : 'SQUAD',\\\n", + " 'image_net' : 'ImageNet',\\\n", + " 'time_error': 'Duration error (s)'\n", + " }\n", + "\n", + "def func_calc_short(calc):\n", + " if calc == 'GA:auto-para':\n", + " temp = 'GA:auto'.split(':')\n", + " return '\\n'.join(temp)\n", + " elif calc == 'FLOPS':\n", + " return 'Flops'\n", + " elif calc == 'CC:on':\n", + " return 'CC'\n", + " elif calc == 'ECO2AI':\n", + " return 'Eco2AI'\n", + " elif calc == 'TAPO':\n", + " return 'EPM'\n", + " elif calc == 'TAPO:dyn':\n", + " return 'EPM\\ndyn'\n", + " elif calc == 'TAPO:tot':\n", + " return 'EPM\\ntot'\n", + " else:\n", + " temp = calc.split(':')\n", + " return '\\n'.join(temp)\n", + "\n", + "def create_calc_labels(calc_list):\n", + " calc_list_label = []\n", + " for calc in calc_list:\n", + " calc_list_label.append(func_calc_short(calc))\n", + " return calc_list_label" + ] + }, + { + "cell_type": "markdown", + "id": "780b5841", + "metadata": {}, + "source": [ + "**Function to load the experiments results into a dataFrame:**\n", + "\n", + "This function will be used to make the error bar plot.\n", + "\n", + "The results data is initially contained in a json file, that we first load in the dictionnary ``d``. A list of ``n_iter`` values is found at: ``d[exp_key][ml_key][comp_key][dev_key][calc_key][meas_key]``." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "40e60357", + "metadata": {}, + "outputs": [], + "source": [ + "def load_exp_results(file_name):\n", + " \"\"\"Loads results of the experiments qnd transform them for the error bar plot:\n", + " - remove idle pozer from tapo\n", + " - converto to a per epoch value\"\"\"\n", + "\n", + " # power_idle = 69 # Watts\n", + " reform = []\n", + " with open(file_name, 'r') as file:\n", + " d = json.load(file)\n", + " for exp_key, exp_dict in d.items(): \n", + " for ml_key, ml_dict in exp_dict.items():\n", + " for comp_key, comp_dict in ml_dict.items():\n", + " for dev_key, dev_dict in comp_dict.items():\n", + " for calc_key, calc_dict in dev_dict.items():\n", + " \n", + " for meas_key, value in calc_dict.items():\n", + " \n", + " if value != [] and 'N/A' not in value:\n", + "\n", + " val_array = np.array(value)\n", + " \n", + " # Special case of the energy:\n", + " if meas_key == 'energy_consumed':\n", + "\n", + " # Converting to Wh (instead of kWh):\n", + " val_array = val_array*10**3\n", + " \n", + " if calc_key == 'TAPO':\n", + " # removing the idle energy consumed:\n", + " duration = np.array(calc_dict['time'])\n", + " # val_array = val_array - duration*power_idle/(3.6*10**3) # in Wh\n", + " val_array = val_array\n", + "\n", + " # Converting all data to a \"per epoch\" value:\n", + " nb_epochs = calc_dict['epochs'][0]\n", + " val_array = val_array/nb_epochs\n", + " \n", + " mean_val = val_array.mean()\n", + " min_val = val_array.min()\n", + " max_val = val_array.max()\n", + " std_val = val_array.std()\n", + " nb_val = val_array.size\n", + "\n", + " if nb_val == 1:\n", + " unbiased_std_val = 0\n", + " else:\n", + " unbiased_std_val = np.sqrt(1/(nb_val-1)*np.sum(np.sum(np.square(val_array - mean_val))))\n", + " \n", + " reform.append((exp_key, ml_key, comp_key, dev_key, calc_key, \n", + " meas_key, mean_val, min_val, max_val, std_val, unbiased_std_val, nb_val))\n", + " \n", + " dat_all = pd.DataFrame(reform, columns = ['exp', 'ml', 'comp', 'dev', 'calc',\n", + " 'meas', 'mean_val', 'min_val', 'max_val', 'std_val', 'unbiased_std_val', 
'nb_val'])\n", + "\n", + "\n", + " return dat_all" + ] + }, + { + "cell_type": "markdown", + "id": "90558d7a", + "metadata": {}, + "source": [ + "**Function for plotting error bar:**" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "baed9ec6", + "metadata": {}, + "outputs": [], + "source": [ + "def plot_one_error_bar(x, mean, min_val, max_val, color='#2187bb', horizontal_line_width=0.5):\n", + " color = 'k'\n", + " left = x - horizontal_line_width / 2\n", + " bottom = min_val\n", + " right = x + horizontal_line_width / 2\n", + " top = max_val\n", + " plt.plot([x, x], [top, bottom], color=color)\n", + " plt.plot([left, right], [top, top], color=color)\n", + " plt.plot([left, right], [bottom, bottom], color=color)\n", + " plt.plot(x, mean, 'o', color='#f44336')\n", + "\n", + "def plot_all_error_bar(folder, file_name, name, exp, meas_calc_list, to_save):\n", + "\n", + " dat_all = load_exp_results(os.path.join(folder, file_name))\n", + "\n", + " select = (dat_all['exp'] == exp) & (dat_all['comp'] == comp) & (dat_all['dev'] == dev) \\\n", + " & (dat_all['meas'] == meas) & (dat_all['ml'] == ml)\n", + " dat = dat_all[select]\n", + "\n", + " calc_list = meas_calc_list[meas]\n", + " calc_list_label = create_calc_labels(calc_list)\n", + "\n", + " FS = 14\n", + " matplotlib.rc('xtick', labelsize=FS-2) \n", + " matplotlib.rc('ytick', labelsize=FS-2)\n", + "\n", + " plt.figure(figsize=(6.5, 4.5))\n", + " plt.xticks([i+1 for i in range(len(calc_list))], calc_list_label)\n", + "\n", + " for kk in range(len(calc_list)):\n", + " dat_calc = dat[dat['calc'] == calc_list[kk]]\n", + " mean = dat_calc['mean_val'].values[0]\n", + " min_val = dat_calc['min_val'].values[0]\n", + " max_val = dat_calc['max_val'].values[0]\n", + " plot_one_error_bar(kk+1, mean, min_val, max_val)\n", + "\n", + " plt.grid(alpha = 1, linestyle = '--')\n", + " fig_title = def_fig_title(exp)\n", + " plt.title(fig_title, fontsize = FS +2)\n", + " plt.xlabel('estimation method or tool', fontsize = FS)\n", + " plt.ylabel(def_fig_y_axis(exp), fontsize = FS)\n", + "\n", + " if to_save:\n", + " plt.savefig(os.path.join(folder, name + '_energy' + '.pdf'), bbox_inches='tight') \n", + " plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "00e53d30", + "metadata": {}, + "source": [ + "**Function for plotting evolution:**" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "253169d2", + "metadata": {}, + "outputs": [], + "source": [ + "def plot_evolution(folder, file_name, name, exp, meas_calc_list, to_save):\n", + "\n", + " with open(os.path.join(folder, file_name), 'r') as f:\n", + " d = json.load(f)\n", + " \n", + " # fig = plt.figure(figsize=(4,3.75))\n", + " fig = plt.figure(figsize=(5.5, 6.5))\n", + "\n", + " calc_list = meas_calc_list[meas]\n", + " calc_list_label = create_calc_labels(calc_list)\n", + "\n", + " nb_iter = len(d[exp][ml][comp][dev][calc_list[0]][meas])\n", + " iter_list = [ii + 1 for ii in range(nb_iter)]\n", + "\n", + " d_select = d[exp][ml][comp][dev]\n", + "\n", + " for k in range(len(calc_list)):\n", + " calc = calc_list[k]\n", + " calc_label = calc_list_label[k]\n", + "\n", + " if calc == 'FLOPS' and len(d_select[calc][meas]) == 1:\n", + " meas_list = np.array(d_select[calc][meas]*nb_iter)\n", + " else:\n", + " meas_list = np.array(d_select[calc][meas])\n", + "\n", + " plt.plot(iter_list, meas_list, label = calc_label, marker = 'o')\n", + "\n", + " FS = 14\n", + " plt.xticks(iter_list, fontsize = FS)\n", + " ax = fig.gca(); \n", + " ax.legend(loc='center left', 
bbox_to_anchor=(1, 0.5))\n", + " # ax.ticklabel_format(style = 'sci')\n", + " plt.grid(alpha = 1, linestyle = '--')\n", + " plt.legend(loc='best', fontsize = FS-2)\n", + " fig_title = def_fig_title(exp)\n", + " plt.title(fig_title, fontsize = FS+2)\n", + " plt.xlabel('Iteration of the experiment', fontsize = FS)\n", + " plt.ylabel('Energy consumed per epoch (Wh)', fontsize = FS)\n", + "\n", + " if to_save:\n", + " plt.savefig(os.path.join(folder, name + '_evolution' + '.pdf'), bbox_inches='tight')\n", + " \n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "7f8511f4", + "metadata": {}, + "outputs": [], + "source": [ + "def extract_power(folder, file_name, meas_calc_list):\n", + " calc_list = meas_calc_list['energy_consumed']\n", + " with open(os.path.join(folder, file_name), 'r') as file:\n", + " d = json.load(file)\n", + " res = d['idle']['training']['linux_alienware']['cuda']\n", + " res['power'] = {}\n", + " for calc in calc_list:\n", + " pow = [nrj*1000/(t/3600) for nrj, t in zip(res[calc]['energy_consumed'], res[calc]['time'])]\n", + " res['power'][calc] = np.array(pow).mean()\n", + " return(res['power'])" + ] + }, + { + "cell_type": "markdown", + "id": "d14c5286", + "metadata": {}, + "source": [ + "## 5. Final Paper" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "00110d31", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'CT:pred': 28.314770918284268, 'CT:meas': 28.065697815160128, 'CC:on': 52.498491203947104, 'ECO2AI': 24.90006301980474, 'GA:auto-para': 1.5441070978264082, 'TAPO': 66.45618748873903}\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkMAAAHPCAYAAABQo96lAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAACDVElEQVR4nO3dd1hUx8IG8Hd3QYqAiIBCpBhU7KLGLiB2UaPGEmOiRmNyE1tMTG9KYiwpmnY119zEFk1iSbFgYgMxiCUqmtjAgtgFFUEpsrvz/eG3e11ZEJaBZc++v+fxeXROm3nPyg7nzJyjEkIIEBEREdkptbUrQERERGRN7AwRERGRXWNniIiIiOwaO0NERERk19gZIiIiIrvGzhARERHZNXaGiIiIyK6xM0RERER2jZ0hIiIismvsDJHdCw4OhkqlwpIlS8q03ZIlS6BSqfD000+blMfHx0OlUqFr167S6mgpa9Wla9euUKlUiI+Pr9TjllVCQgJmzZqFIUOGGD8HKpUKf/755wO31ev1+M9//oP27dvD3d0d7u7uaN++PRYtWoSq9GB/Q5uIqHjsDBGR3ZoyZQrefvtt/Pzzzzh79mypt9PpdBg2bBief/55/PPPP4iKikJUVBT+/vtv/Otf/8Ljjz8OvV5fgTW3PTNmzIBKpcKMGTOsXRWiIhysXQEiUp5ly5YhNzcXgYGB1q5KiXr27InBgwejdevWaN26NTp37lyqTtGXX36Jn3/+GQ899BB27tyJevXqAQDOnDmDLl26YPXq1YiIiMCkSZMqugkPdOzYMWtXgajKY2eIiKSr6p0gg48//rjM2+j1esydOxcAMHfuXGNHCADq1auHuXPnYtSoUZg9ezYmTJgAtdq6F+AbNWpk1eMT2QLeJiMqgVarxWeffYbmzZvD2dkZPj4+GDJkCP7++2+L93njxg1Mnz4dYWFhcHd3h6urK5o3b46ZM2ciNzfXon0uW7YMbdu2haurK7y8vNCnTx/s3Lmz2PWLG+9kkJaWBpVKheDg4GLLdTod5s2bh1atWsHNzc1kXEpxY4aefvpp4/isM2fOYNSoUahTpw6cnJwQEhKCd955BwUFBWbrpNVq8emnn6JZs2ZwdnaGr68vhg0bhqNHjz6wPTIlJSXh8uXLcHJywpAhQ4osHzJkCKpVq4aLFy9iz549pd7vvZnt3r0b/fr1Q61ateDu7o7IyEiT8/n777+je/fuqFmzJtzc3NCzZ08cOHDA7H6LGzNkGCOVlpaGuLg49OrVCzVr1oSLiwtat26NZcuWmd3fvduZc+85vrcOMTExAICYmBhjncydM61Wi//+97/o2rUrvLy84OTkhHr16uGFF17AuXPnzB5z69atGDBgAGrXrg1HR0fUrFkTDRo0wFNPPYWEhASz2xDdi1eGiIqh1+sxbNgw/Prrr6hWrRq6du2KmjVrYs+ePWjXrh3GjRtX5n0ePXoUffr0wblz5+Dn54cuXbrA0dERe/fuxbvvvou1a9ciPj4eNWrUKPU+X3zxRXzxxRdQq9Xo0qUL/P39cfjwYXTt2hWTJ08ucx1LQwiBxx57DL///jvCw8PRuHFjHDlypNTbJycn48UXX0TNmjURGRmJ69evIzExER9++CGOHDmCX375xWR9vV6PwYMHY8OGDSbnYt++fWjbtq1F58JSBw8eBAA0bdoUzs7ORZa7uLigadOmOHjwIA4ePIiOHTuWaf8bN240dsB79uyJEydOICEhAT179sT27dtx8OBBTJkyBR06dECvXr2QnJyMrVu3IjIyEgcPHkT9+vXLdLzvvvsOM2fOROvWrdGnTx+kpaVh9+7dGDNmDK5fv46pU6eWaX/mjBkzBsnJyTh06BBa
tmyJsLAw47IuXboY/56Tk4NHH30U8fHxcHNzQ5s2beDj44O///4bX3/9NVavXo0tW7agVatWxm2WLl2KsWPHAgDatWuHqKgo5OXl4fz58/jxxx/h7e2NiIiIcreBFE4Q2bmgoCABQCxevNik/KuvvhIARO3atcXRo0eN5YWFheKFF14QAAQAMWbMGJPt4uLiBAARGRlpUp6bmytCQkIEAPHOO++IgoIC47Lbt2+LJ554QgAQY8eOLXXdN2zYIACI6tWri4SEBJNls2bNMtbx/rosXrzYbN0Nzpw5IwCIoKAgs+UARN26dcWJEyfMbh8ZGSkAiLi4OJPyMWPGGLd/++23hVarNS77+++/RfXq1QUAsWvXLpPtPv/8cwFA+Pn5iePHjxvLtVqtePHFF4s9F2Vl+Czs3Lmz2HVefvllAUAMGjSo2HUeffRRAUC88sorpT62ITOVSiWWL19u9pihoaHCzc1NbN261bhMq9WKIUOGCABi/PjxRfZryOZ+hrY6OjqK9evXmywzfD5q1KghcnNzzW535swZs+0wnOP7/z9Nnz5dABDTp08vNoORI0cKAKJ///7iypUrJsvmz58vAIgGDRqYfG7q1atX7Dm7cuWKOHDgQLHHIzLgbTKiYnz22WcA7s6Cady4sbHcwcEB8+bNQ506dcq0v6VLl+LUqVPo378/PvjgA1SrVs24zNXVFYsWLYKvry+WL1+OGzdulKmOkyZNQnh4uMmyN9980+Q3cNlmzZqFhg0bWrRtmzZt8MEHH0Cj0RjLmjVrhlGjRgG4e9vjXp9//jmAu+ciNDTUWK7RaPDRRx/hoYcesqgelsjJyQEAVK9evdh13NzcAADZ2dll3v/QoUPx1FNPmZS9/fbbAIATJ07ghRdeQPfu3Y3LNBoN3nrrLQDAtm3byny8yZMno3///iZlTz/9NBo1aoSbN2/ir7/+KvM+LXHs2DH88MMP8Pf3x8qVK+Hr62uyfOrUqYiOjkZqaio2bdpkLL9y5Qpq1KhhcoXJwNfX1+QqElFx2BkiMuPChQs4efIkABT5YgIAZ2dnDB8+vEz73LhxIwDg8ccfN7vczc0NjzzyCLRaLfbt2/fA/Wm1WuPzcMzVEQBGjx5dpjqWhbnxMqXVv39/s+NYDJ3OCxcuGMvOnz+P06dPAwBGjhxZZJtq1aph6NChFtelqomOji5S5uXlhVq1ahW7vEGDBgCAixcvlvl4AwYMMFtu7lxUpNjYWAgh0LdvX7i7u5tdx/C8rF27dhnL2rVrh5s3b2L06NHYv38/H2lAFmFniMiM8+fPAwC8vb2Nv+Xf795ZRKVh+EIfNWqUyQDSe//ExsYCADIyMh64v2vXriE/P7/EupS1jqXl6+sLV1dXi7cvbraZh4cHABjbBZTuXNw/0LsiGb6ob9++Xew6t27dAvC/9pRFcdkY2m5uuaFOxQ0+t+R45s5FRTL8//j222+L/f/x2muvATD9/7FgwQI8/PDDWL58OR555BF4enqie/fu+PDDD5Genl4pdSfbxwHURJXE8Btrnz59ULt27RLXDQoKqowqFetBv127uLiUa/+WTDcv6SnKlfmEZUPHq6QvWsOsJ0s6aQ/KRvZUfdn7s/TKjGG7sLAwtGzZssR127dvb/x748aNceLECWzevBnbt2/Hrl27sHPnTmzfvh3vv/8+vv3222KvnBIZsDNEZIZhDEpmZiZu3bpl9opEcVOLixMQEIDjx4/jmWeekXJbp1atWnByckJBQQHS0tLQtGnTUtfRMF7JMP7lfmV5GnNFM5yLjIwM3L592+xYnbKei/Jo3bo1AODIkSPIz88vMqMsLy/POLPOsK6SVNRnJyAgAADQuXNnfPXVV2Xa1sHBAdHR0cZbiNnZ2Zg3bx5iYmLwr3/9C4MHDy5xjBcRb5MRmVG3bl08/PDDAICVK1cWWV5QUIDVq1eXaZ99+/YFAKxatar8FcTdL4DOnTsDAFasWGF2neXLl5stN3Qwjh8/bna5YXxTVRAQEGC8wvLDDz8UWX7nzh2sXbu20urTsWNH1KlTBwUFBWaPu3btWty5cwf+/v4mVzCUwvDZMfdk68uXLxf7vCNDJ0qr1Zpdbvj/sW7dunLfmvPw8MCMGTPg6emJ3NxcpKSklGt/pHzsDBEVw/B8lRkzZph0GnQ6HV555ZUyD1Z97rnnEBQUhNWrV+P11183+5v15cuX8c0335S5jl9++aXJoFIA+Oijj4r9YmrXrh08PDxw9OjRIh2m1atX44svvih1HSrDlClTAADTp083+WLT6/V48803i30YX0VQq9V4/fXXAQCvv/46zpw5Y1x25swZvPHGGwDuzuaz9tOnK0KPHj0A3H36dlZWlrE8IyMDo0ePNo6Xul/dunUBoNjnUbVq1QpDhgzBuXPn8Nhjj5m92nf79m2sWLECV65cAQDk5uZi3rx5ZsfY7dy5E1lZWdBoNMZjExWHt8mIijFx4kRs2bIF69evR8uWLREVFWV86OKlS5fwwgsvYOHChaXeX/Xq1bFx40b0798fH330ERYtWoQWLVqgbt26xt9ejx07Bl9fXzz77LOl2ueAAQMwceJE/Pvf/0Z4eDgiIiLg5+eHw4cP49ixY3jxxReN09Lv5eLigpiYGLz00ksYPXo0Fi5ciIceegjHjh3D0aNH8c477+CDDz4oddsq2pQpU7BlyxZs2rQJLVq0QFRUFDw9PbFv3z5cvHgREyZMwIIFC0weV1Aa//3vf/Hf//7X+O9Lly4BAP71r38ZByX7+fkVeQjk5MmTkZCQgF9++QXNmjUzdhC2bt2K3NxcDB06FBMmTChPk6usiRMn4ptvvsGBAwcQGhqKjh074vbt29i3bx8CAwMxaNAg/Prrr0W26927N6pXr45ff/0VXbp0QYMGDaDRaNC5c2fjQxMXL16MrKwsbNq0CaGhoWjZsiXq1asHIQTS0tJw6NAh3LlzB8eOHUPt2rVx584dTJs2Da+++iqaN2+OBg0awNHR0fjgSODuYwl8fHwqMyKyQcr7tYVIErVajZ9//hmffvop6tevj/j4eGzZsgUtWrTA7t270a5duzLvs2nTpjh8+DA++ugjNG7cGIcPH8bq1auxZ88eVK9eHa+88kqRL94H+eqrr/Ddd9+hVatW2L17N2JjY+Hn54dt27Zh0KBBxW43depULF26FK1bt8bBgwexefNm1K5dG5s3b67UJzqXhkajwW+//YaPPvoIISEhiIuLw9atW9GiRQvs3bvX+Mwnb2/vMu33/Pnz2LNnj/HPnTt3ANx9UrihzPDE6fvrs2bNGnz99ddo0qQJtm3bhm3btqFp06b4+uuvsWrVKkVeFQIAT09PJCYmGh/bsGnTJpw6dQrPPfccdu3aVezT02vXro1NmzahR48eOHr0KJYtW4Zvv/0WO3bsMK7j7u6OzZs3Y+XKlejRowfS09Pxyy+/YPv27cjLy8OTTz6JX375BSEhIQDuzrD7+uuv8fjjj6OgoABbtmzBr7/+iqtXr+Kxxx7Dtm3bjK8BISqJSgghrF0JIqLy6NatG+Li4rB
27Vo89thj1q4OEdkYZf7qQkSKk5ycbLxyY3Dnzh3MmDEDcXFx8PX1NftAQiKiB+GYISKyCVOnTkVycjJatmwJPz8/3LhxA3///TcuXboEZ2dnLF261OyLU4mIHoS3yYjIJqxYsQIrVqzA4cOHce3aNQgh4O/vj6ioKEybNg1NmjSxdhWJyEaxM0RERER2jWOGiIiIyK7Z/ZghvV6Pixcvwt3dvVLfb0REREQVRwiBnJwc+Pv7P/BRF3bfGbp48aLxnThERESkLOfOnXvgU8jtvjNkeMrsuXPn4OHhYeXaPJihp8srWeXHLOVinvIwS3mYpTy2lmV2djYCAgKM3/MlsfvOkOGEenh42ERnqLCwEPHx8YiOjoajo6O1q2PTmKVczFMeZikPs5THVrMsTceNA6iJiIjIrrEzRERERHaNnSEb5OBg93c3pWGWcjFPeZilPMxSHqVmafcPXczOzkaNGjVw8+ZNmxgzRERERA9Wlu93XhmyMXq9HlevXoVer7d2VWwes5SLecrDLOVhlvIoOUt2hmyMTqdDUlISdDqdtati85ilXMxTHmYpD7OUR8lZsjNEREREdo2dISIiIrJr7AzZGJVKZTNP/6zqmKVczFMeZikPs5RHyVlyNhlnkxEREZVI6HTQ/50McT0TKi9vqJuHQaXRWLtaJSrL97syHxigYHq9HufOnUNAQMAD38JLJWOWcjFPeZilPMyy/LQ743Dn3/MgMq8ay1Tevqg28WU4hEdZsWbysDNkY3Q6HZKTk+Hv78//2OXELOVinvIwS3nsNUshBHJzc8u/o6QEYM4MCAD33hwTmVdREPMGCt6YAXSMKPdhXF1drXr7jZ0hIiIihcnNzYWbm1u59qEGcLRzS/g7VYPaTEdFLwQuzHgTTRMPobxPHrp16xaqV69ezr1Yzn66yURERFRqnT3dUdfZyWxHCADUKhUCnJ3Q2dO9kmsmH68M2RiVSgUfHx9FjuavbMxSLuYpD7OUx16zdHFxwZUrV8q1D82uHcBXnzxwvV8Wfwtdp8hyHcvFxaVc25cXZ5NxNhkRESnM7du3y32bLNzTHZvaNH7gen33H8POrJxyHasibpPx3WQKptPpcPz4cUU+Dr2yMUu5mKc8zFIeZmm5xKwcnM8vgL6YayZ6IXAuvwCJ5ewIVQW8TWZj9Ho9Tpw4gZCQEGiq+DMeqjpmKRfzlIdZymOvWbq6uuLWrVvl31Exs8mA/x8zNGM2siXNJrMmdoaIiIgURqVSybnt1KMvtE7ORZ8z5FMb1Sa8xOcMERERkfI5hEdB0ykCd5L/wsHt29CqW3dUC3ukyj+BuizYGbIxarUagYGBdvXwsIrCLOVinvIwS3mYpRwqjQYOYY/ASV0NDi1aKKojBHA2GWeTERERKRBnkymYTqfDwYMHOTNCAmYpF/OUh1nKwyzlUXKW7AzZGL1ej/T0dOj15X34OTFLuZinPMxSHmYpj5KzZGeIiIiI7Bo7Q0RERGTX2BmyMWq1GqGhoZwZIQGzlIt5ysMs5WGW8ig5S84m42wyIiIixeFsMgXTarXYtWsXtFqttati85ilXMxTHmYpD7OUR8lZsjNkY4QQyMjIgJ1f0JOCWcrFPOVhlvIwS3mUnCU7Q0RERGTX2BkiIiIiu8bOkI3RaDQICwuDRmHvhbEGZikX85SHWcrDLOVRcpacTcbZZERERIrD2WQKptVqsX37dkWO5q9szFIu5ikPs5SHWcqj5CzZGbIxQgjk5OQocjR/ZWOWcjFPeZilPMxSHiVnyc4QERER2TV2hoiIiMiusTNkYzQaDTp27KjI0fyVjVnKxTzlYZbyMEt5lJwlZ5NxNhkREZHicDaZghUWFmLjxo0oLCy0dlVsHrOUi3nKwyzlYZbyKDnLKtcZunXrFqZPn44+ffrAy8sLKpUKS5YsMbvusWPH0KdPH7i5ucHLywujRo1CRkZG5VbYCpQ4rdFamKVczFMeZikPs5RHqVk6WLsC98vMzMT777+PwMBAtGzZEvHx8WbXO3/+PCIiIlCjRg3MmjULt27dwieffIK///4be/fuRbVq1Sq34kRERGSTqlxnyM/PD5cuXUKdOnXw119/oW3btmbXmzVrFm7fvo39+/cjMDAQANCuXTv07NkTS5YswXPPPVeZ1SYiIiIbVeVukzk5OaFOnToPXG/t2rXo37+/sSMEAD169EDDhg2xatWqiqyiVTk4OCAqKgoODlWuH2tzmKVczFMeZikPs5RHyVnaZIsuXLiAq1ev4pFHHimyrF27doiNjS1224KCAhQUFBj/nZ2dDeDuwDDDoDC1Wg2NRgOdTge9Xm9c11Cu1WpNnsCp0WigVquLLb9/sJnhg3T/vdfiyh0dHaHX66HT6SCEgIODA7RarUm5gUqlgoODQ7F1r4ptur/uldUmQ5Z6vR4ajUYRbbq/vDLbBAAuLi7Gz6kS2mSt86TRaODs7IzCwkKoVCpFtMla50mtVqNatWomWdp6m6x1ngw/M+/Nsiq3qSwDvW2yM3Tp0iUAd2+p3c/Pzw/Xr19HQUEBnJyciiyfPXs2YmJiipRv3rwZrq6uAIDAwEC0atUKhw8fRnp6unGd0NBQNGrUCHv37jUZqB0WFoagoCAkJCQgJyfHWN6xY0f4+vpi8+bNJh+sqKgouLi4FOm0RUdHIy8vD3FxccYyBwcH9OvXD5mZmUhKSjKWu7m5oXv37jh37hySk5ON5T4+PujUqRNSU1Nx4sQJY7kttMnd3R3dunWr9Da1bdsW/v7+imqTNc6Tt7c3MjMzUb9+fZw8eVIRbbLWeQoPD8fOnTtxP1tuk7XOU7NmzfDHH3+YHNPW22St85STk2PyuazqbcrNzUVpVennDBnGDC1evBhPP/20sXznzp2IiIjATz/9hOHDh5ts89577+GDDz7AjRs34OnpWWSf5q4MBQQEIDMz0/gcgqr8G1JhYSG2bNmCXr16wcXFxeo9bxltur/uldUmQ5Z9+vSBk5OTItp0f3lltkmr1WLz5s3o3bu3yUPZbLlN1jpPQghs2rQJPXv2hKOjoyLaZK3zpNfrERsba5KlrbfJWuepsLCwyOeyKrcpOzsb3t7epXrOkE1eGXJxcQEAk06NQX5+vsk693NycjJ7xcjR0dHkhw5wN1hzT9os7n5pceX379eScsPl3vuPdX+5QXF1r8ptelB5RbXJcCwltamkOpa13JI2mduPrbepMs+T4Ye5uZ9L5tYvqe5VpU2WlMtok+HL1VyWttqmksorsk333hq7d3lVbVNxbTOnyg2gLg3D7THD7bJ7Xbp0CV5eXmY7PERERET3s8nO0EMPPQQfHx/89ddfRZbt3bsXYWFhlV+pSuLg4IDo6GhFjuavbMxSLuYpD7OUh1nKo+QsbbIzBABDhgzBhg0bcO7cOWPZtm3bkJKSgmHDhlmxZhUvLy/P2lVQDGYpF/OUh1nKwyzlUWqWVbIz9NVXX2HmzJn47rvvAADr16/HzJkzMXPmTNy8eRMA8NZbb8HV1RVRUVH48ssvMXv2bAwbNgzNmzfH2LFjrVn9CqXVah
EXF6fYR6JXJmYpF/OUh1nKwyzlUXKWVfJa1yeffIKzZ88a//3zzz/j559/BgA89dRTqFGjBgICArBjxw68/PLLeOONN1CtWjX069cPn376KccLERERUalVyc5QWlpaqdZr2rRpkedHEBEREZVFlbxNRiVT4uA1a2GWcjFPeZilPMxSHqVmWaUfulgZsrOzUaNGjVI9lImIiIhsQ1m+33llyMbo9XpcvXrV5CmdZBlmKRfzlIdZysMs5VFyluwM2RidToekpKQiL8iksmOWcjFPeZilPMxSHiVnyc4QERER2TWLRkLFxcVh27ZtSExMxPnz55GZmQlXV1f4+PigefPmiIyMRP/+/VGnTh3Z9SUiIiKSqtSdodu3b+OLL77AN998g7NnzxrfFOvs7AwvLy/k5eXhn3/+weHDh7FixQo4OjpiwIABeOmll9C5c+cKa4C9UalUcHd3N74wjyzHLOVinvIwS3mYpTxKzrJUs8m+/vprxMTE4MqVK2jRogWGDx+Ojh074pFHHoG7u7txPSEEUlNTsWfPHmzevBm//fYbbt++jYEDB+LTTz9FvXr1KrQxluBsMiIiIuWRPpts8uTJ6NmzJw4fPozk5GS89dZbiIqKMukIAXd7jQ0bNsSoUaOwfPlyXLlyBf/5z39w6NAhLF++3PIWkZFer8fZs2cVOZq/sjFLuZinPMxSHmYpj5KzLFVn6MiRI1i2bBmaNWtWpp27uLhg/PjxSElJwahRoyyqIJnS6XRITk5W5Gj+ysYs5WKe8jBLeZilPErOslSdoYYNG5brIBqNpkreIiMiIiLi1HoiIiKya+V6ycjly5exf/9+ZGVlFXvZbPTo0eU5BN1HpVLBx8dHkaP5KxuzlIt5ysMs5WGW8ig5S4veTZafn49nn30WP/74Y7EDqYQQUKlUVf7eImeTERERKU9Zvt8tujL0xhtvYMWKFWjYsCGeeOIJ1K1bV7Fvsq1qdDodUlNT0aBBA2g0GmtXx6YxS7mYpzzMUh5mKY+Ss7SoB7Nq1So0adIE+/fvh5OTk+w6UQn0ej1OnDiBkJAQxX0YKxuzlIt5ysMs5WGW8ig5S4sGUGdlZaFPnz7sCBEREZHNs6gzFBoaiitXrsiuCxEREVGls6gz9Oqrr+K3337DyZMnZdeHHkCtViMwMBBqNZ+KUF7MUi7mKQ+zlIdZyqPkLEs1mywhIaFI2Zdffolt27Zh6tSpaN26dbEjtSMiIspfywrE2WRERETKU5bv91J1htRqtdnnChg2LemZA5xaL5dOp8Phw4fRokULxQ1gq2zMUi7mKQ+zlIdZymNrWUqfWv/ee+8p8iFLtkiv1yM9PR3NmjWziQ9jVcYs5WKe8jBLeZilPErOslSdoRkzZlRwNYjIlgmdDvrDB1A75Qj0detAhD0ClcJ+WBKRcpX6OUMFBQWcSk9ERWh3xuHOv+dBZF5FcwDaP36DztsX1Sa+DIfwKGtXj4jogUrdGfL09ETHjh3RtWtXREVFoUOHDnB0dKzIupEZarUaoaGhihzNX9nsNUshBHJzc+XsLCkBmDMDAsC9N9JF5lUUxLyBgjdmAB3LN4nC1dXV7m7T2+tnsyIwS3mUnGWp303m5eWFrKysuxupVHB2dkanTp0QFRWFqKgotGvXzibvIdraAGqi8rp9+zbc3NzKvR81gKOdW8LfqRrUZjoreiFwoeAOmiYegvk3GJbOrVu3UL169XLsgYjsUVm+30vdvbt27RoOHDiAefPmoX///nBycsK2bdvwzjvvoEuXLqhZsyaio6Px8ccf46+//oIF73+lUtBqtdi1axe0Wq21q2Lz7DVLWf83O3u6o66zk9mOEACoVSoEODuhs6d7uY5jjz9L7PWzWRGYpTxKzrLUt8lUKhXCwsIQFhaGqVOnQgiB5ORkxMfHIy4uDjt37sTvv/+O33//HSqVCh4eHoiIiMBvv/1WkfW3O0IIZGRk2OUXhGzMsnzqOJXuNnlp16P/4WdTHmYpj5KztPhV8yqVCq1atUKrVq3w0ksvQQiBgwcPIj4+HkuWLME///yDDRs2yKwrEUkga/zN5YJCqesVx97GCxFR5bO4M3Sv8+fPIy4uzvjn7NmzACBlXAIRyeXq6opbt26Vf0c6HfDsSOBaRvHrePtg09ktQDnGE7q6ulq8LRFRaVjUGbp8+bJJ5+f06dMQQsDDwwNdunTBhAkTEBkZiTZt2siur93TaDQICwuzycHqVY29ZqlSqaQNSNZOmoaCmDeKXe40cRocODGhzOz1s1kRmKU8Ss6y1LPJVq1aZRwflJKSAiEEPD09ER4ejsjISERGRqJVq1Y2N+WOs8mIyufe5wwZqHxqo9qEl/icISKyGunvJgPuPl9ArVajT58+6NWrFyIjI9GiRQubv59va50hrVaLhIQEREREwMFByl1Ou8Us5RE6HQqT9+PYrj/RuFMXOIa14ROoy4GfTXmYpTy2lqX0d5MZ6PV6xMfHIz8/Hzk5Obh16xbatWvHhy9WIiEEcnJyFDmav7IxS3lUGg1ULVoh7fwlNGnRih2hcuJnUx5mKY+Ssyx1Z+jatWvYsWMH4uLiEB8fj/feew8A4OLigg4dOiAyMhJdu3blk6mJiIjIppS6M1SzZk0MGjQIgwYNAnC3c2QYQxQfH4/p06cbn0zdoUMHdO3aFV27dkV4eHhF1Z2IiIio3Eo9ZuhBMjIyEB8fj/j4eGzevBmnT5+GSqWq8k+qtLUxQ3q9HpmZmfD29ra5wepVDbOUi3nKwyzlYZby2FqWFTZmqDharRapqalISUlBSkoKLl68qMh7ilWBWq2Gr6+vtauhCMxSLuYpD7OUh1nKo+QsLera6XQ67N69G3PmzEHv3r1Rs2ZNhIeH491338W2bdvg6uqKxx57DF988YXs+tq9wsJCbNy4EYWF5XuqLzFL2ZinPMxSHmYpj5KzLPWVob179xrHCCUmJuL27dvGqz+enp4YMGCA8Q32LVq0qLAKE6r8rUdbwizlYp7yMEt5mKU8Ss2y1J2hDh06QKVSGZ80HR0dbez8hIWF2fzzhoiIiMg+lboz1KtXL3Tr1g1RUVFo06aNTQyeIiIiInoQabPJbJWtzSYzPPTK3d2dV+PKiVnKxTzlYZbyMEt5bC3Lsny/2/TlndTUVIwYMQJ169aFq6srGjVqhPfffx+5ubnWrlqFcnFxsXYVFINZysU85WGW8jBLeZSaZak6Q3369MG+ffssOsDt27cxZ84c/Pvf/7Zo++KcO3cO7dq1w+7duzFp0iR89tln6NixI6ZPn44nnnhC6rGqEq1Wi9jYWMUOYqtMzFIu5ikPs5SHWcqj5CxLNWYoIyMDHTp0QEREBEaPHo3HHnsMNWrUKHGb3bt34/vvv8ePP/6IvLw8LF26VEqFDZYvX46srCz8+eefaNq0KQDgueeeg16vx7Jly3Djxg3UrFlT6jGJiIhIeUrVGdq/fz+WLl2KmJgYPPPMM3j22WcRGhqKNm3aoHbt2vD09ER+fj6uX7+OEydO4K+//kJOTg40Gg1GjBiBmTNnIjAwUGrFs
7OzAQC1a9c2Kffz84NarUa1atWkHo+IiIiUqdSzycaMGYPRo0cjNjYWixcvRnx8PL7//vsi66nVarRo0QKDBw/G+PHj4efnJ7XCBl27dsXcuXPxzDPPICYmBrVq1cKuXbuwcOFCTJkyBdWrV6+Q4xIREZGylGs22bFjx3D+/Hlcu3YNLi4u8PHxQdOmTR94C02WmTNnYtasWcjLyzOWvf3225g5c2ax2xQUFKCgoMD47+zsbAQEBCAzM9M42lytVkOj0UCn00Gv1xvXNZRrtVqT141oNBqo1epiy+9/WqeDw90+6P33XYsrd3R0hF6vh06ngxACWq0Wjo6OJuUGKpUKDg4Oxda9Krbp/rpXVpsMWTo5OUGj0SiiTfeXV2ab7l1+b11suU3WOk+GfRjqoIQ2Wes8qdVqFBQUQK1WG7O09TZZ6zwJIZCfnw8HBwdjllW5TdnZ2fD29q74d5M1btwYjRs3Ls8uyiU4OBgREREYMmQIatWqhY0bN2LWrFmoU6cOJk2aZHab2bNnIyYmpkj55s2b4erqCgAIDAxEq1atcPjwYaSnpxvXCQ0NRaNGjbB3715kZGQYy8PCwhAUFISEhATk5OQYyzt27AhfX19s3rzZ5IMVFRUFFxcXxMbGmtQhOjoaeXl5iIuLM5Y5ODigX79+yMzMRFJSkrHc3d0d3bp1w7lz55CcnGws9/HxQadOnZCamooTJ04Yy9mm4tvUoUMH1K5dW1FtstZ5atasGS5cuICUlBTFtMka56lr167Q6XTYuXOnYtpkrfMUFhaGw4cP49KlS4ppk7XOU25uLuLj422mTWWZWW6zzxn68ccfMW7cOKSkpKBu3brG8rFjx2LVqlVIT09HrVq1imxn61eGCgsLsWXLFvTq1QsuLi5W73nLaNP9da+sNhmy7NOnD5ycnBTRpvvLK7NNWq0WmzdvRu/evaHRaBTRJmudJyEENm3ahJ49e8LR0VERbbLWedLr9YiNjTXJ0tbbZK3zVFhYWORzWZXbVGlXhqxpwYIFaNWqlUlHCAAeffRRLFmyBAcPHkSPHj2KbOfk5AQnJ6ci5YbbTvfSaDQmP9QNDB+W0pbfv19Lyg2Xe+8/1v3lBsXVvSq36UHlFdUmw7GU1KaS6ljWckvaZG4/tt6myjxPhh/m5n4umVu/pLpXlTZZUi6jTYYvV3NZ2mqbSiqvyDbde2vs3uVVtU3Ftc0cm33o4pUrV8yOVzD8EFHicxCIiIhIPpvtDDVs2BAHDx40GZsAAD/88INxRptSFdcbprJjlnIxT3mYpTzMUh6lZmmzY4YSEhLQrVs31KpVC5MmTUKtWrWwYcMGbNq0CePHj8c333xTqv3Y2rvJiIiI6MHs4t1kERER2LVrF9q0aYMFCxZg6tSpOHXqFD788EMsXLjQ2tWrMHq9HlevXjUZZEaWYZZyMU95mKU8zFIeJWdps50hAGjXrh1iY2Nx6dIl3LlzBydOnMBbb72l2Mt4AKDT6ZCUlGR2vBSVDbOUi3nKwyzlYZbyKDlLm+4MEREREZVXqS6h3PvkzrJQqVSc1UVERERVWqk6QxEREUU6Qzdu3MDhw4eh0WgQEBCA2rVr48qVKzh37hx0Oh1atGjBt8ZXAJVKBXd3d4s6p2SKWcrFPOVhlvIwS3mUnKVFs8nOnz+Pzp07Izw8HLNmzTJ5I316ejrefPNNJCYm4s8//yzyUMSqhrPJiIiIlKfCZ5O98sor8PPzw/fff2/SEQLuvmNkxYoVqFOnDl599VVLdk8l0Ov1OHv2rCJH81c2ZikX85SHWcrDLOVRcpYWdYa2bt2K7t27l7hOt27dsHXrVosqRcXT6XRITk5W5Gj+ysYs5WKe8jBLeZilPErO0qLOUH5+vskbgM25ePEi8vLyLKoUERERUWWxqDPUpk0b/Pjjj0hKSjK7fNeuXfjpp5/Qtm3bclWOiIiIqKJZ9HTCDz/8EN27d0d4eDgGDBiALl26wNfXF1evXsXOnTuxYcMGODg4YObMmbLra/dUKhV8fHwUOZq/sjFLuZinPMxSHmYpj5KztPjdZNu2bcNzzz2HM2fO3N2RSgXDrurVq4dFixY9cFxRVcDZZERERMpTlu93i99b0b17d5w8eRJ//vknDh06hJs3b6JGjRpo2bIlunTposieY1Wg0+mQmpqKBg0aQKPRWLs6No1ZysU85WGW8jBLeZScZble4qVSqRAeHo7w8HBZ9aEH0Ov1OHHiBEJCQhT3YaxszFIu5ikPs5SHWcqj5CzL/UbTo0eP4vjx47h9+zZGjRolo05ERERElcbiF7Xu27cPYWFhaN68OYYNG4ann37auCwhIQGurq5Yt26djDoSERERVRiLOkNHjhxBt27dcObMGbz00kvo27evyfLw8HB4e3tj9erVUipJ/6NWqxEYGAi12uJ+LP0/ZikX85SHWcrDLOVRcpYWzSYbOnQo/vjjDxw8eBD169dHTEwM3n//fZOnUj7++OM4dOgQjh8/LrXCsnE2GRERkfJU+LvJduzYgSFDhqB+/frFrhMYGPjAp1RT2el0Ohw8eFCRj0OvbMxSLuYpD7OUh1nKo+QsLeoM5eTkwNfXt8R18vLyFBmYten1eqSnpyvyRXmVjVnKxTzlYZbyMEt5lJylRZ2hgIAA/P333yWuc+DAAYSEhFhUKSIiIqLKYlFnqH///ti8eXOxb6VftWoVdu/ejUGDBpWnbkREREQVzqLnDL311ltYs2YNoqOjMWbMGFy+fBkAsGDBAiQlJeGHH35AcHAwXn75ZamVpbuj+UNDQxU5mr+yMUu5mKc8zFIeZimPkrO0+N1kp0+fxqhRo8y+ub59+/bGDlFVx9lkREREylPhs8kA4OGHH0ZiYiIOHDiABQsWYObMmfjiiy+wZ88eJCUl2URHyBZptVrs2rULWq3W2lWxecxSLuYpD7OUh1nKo+Qsy/06jrCwMISFhUmoCpWGEAIZGRmw8IIe3YNZysU85WGW8jBLeZScpfJu/BERERGVgcVXhnJycvDtt9/i0KFDuHjxIgoLC4uso1KpsG3btnJVkIiIiKgiWdQZ2rdvH/r27YsbN26UeLlMpVJZXDEyT6PRICwsDBqNxtpVsXnMUi7mKQ+zlIdZyqPkLC2aTdapUyfs3bsXs2fPxhNPPAE/Pz+bDYezyYiIiJSnwmeTHTx4ECNGjMCrr76KunXr2mxHyBZptVps375dkaP5KxuzlIt5ysMs5WGW8ig5S4s6Q15eXvDx8ZFdFyoFIQRycnIUOZq/sjFLuZinPMxSHmYpj5KztKgzNGjQIGzfvl2RL2sjIiIi+2JRZ2j27NlwdHTEk08+iQsXLsiuExEREVGlsfh1HAcOHECPHj1w8+ZN1KxZ0+zgJJVKhVOnTpW7khXJ1gZQ6/V6ZGZmwtvbW5Hvh6lMzFIu5ikPs5SHWcpja1mW5fvdoqn127Ztw4ABA5Cfnw9HR0e4uLiYvYeoxPuK1qZWq+Hr62vtaigCs5SLecrDLOVhlvIoOUuLunavv/46hBD46aefkJ+fj3PnzuHMmTNm/5BchYWF
2Lhxo9mHXFLZMEu5mKc8zFIeZimPkrO06MrQ0aNH8dRTT2HYsGGy60OloMRpjdbCLOVinvIwS3mYpTxKzdKiK0M+Pj5wcXGRXRciIiKiSmdRZ+jJJ5/Epk2bkJeXJ7s+RERERJXKotlkd+7cweOPP47r169j1qxZaNmyJdzc3CqifhXO1maTGR565e7uzne/lROzlIt5ysMs5WGW8thalhU+m8xwi0wIgYiIiGLXU6lUir2/aE28RSkPs5SLecrDLOVhlvIoNUuLOkPh4eE20StUIq1Wi9jYWERHR8PR0dHa1bFpzFIu5ikPs5SHWcqj5Cwt6gzFx8dLrgYRERGRdVT9R0gSERERVSB2hoiIiMiuWTSbrFu3bqXbuUqFbdu2lblSZXHgwAHMmDEDf/75J/Lz8/Hwww/jueeew5QpU0q1vS3OJtNqtXBwcOC4rXJilnIxT3mYpTzMUh5by7LCZ5M9aMyQSqWCEKLCw9q8eTMGDBiAVq1a4d1334WbmxtOnTqF8+fPV+hxrS0vLw/u7u7WroYiMEu5mKc8zFIeZimPUrO06DaZXq83+ycrKwvbt29H+/btMXToUNy5c0d2fY2ys7MxevRo9OvXD7t27cJLL72EZ599FnPmzMFHH31UYce1Nq1Wi7i4OD6yQAJmKRfzlIdZysMs5VFyllLHDHl4eKBr1674448/sHfvXnz44Ycyd29i5cqVuHLlCj788EOo1Wrcvn0ber2+wo5HREREymTRbbIHcXd3R9++fbF48WK89957FXEIbN26FR4eHrhw4QIGDRqElJQUVK9eHaNGjcL8+fPh7OxsdruCggIUFBQY/52dnQ3g7tt4DW/iVavV0Gg00Ol0Jh0sQ7lWq8W9Q600Gg3UanWx5fe/4dfB4W7s9/euiyt3dHSEXq+HTqcz7kur1ZqUG6hUKjg4OBRb96rYpvvrXlltMrTBsK0S2nR/eWW2ybDtvWW23iZrnSfDOqVtqy20yVrnyeDe+th6m6x1nsx9Lqtym+5vW0kqpDME3G3EpUuXKmr3SE1NhVarxcCBA/HMM89g9uzZiI+Px5dffomsrCz88MMPZrebPXs2YmJiipRv3rwZrq6uAIDAwEC0atUKhw8fRnp6unGd0NBQNGrUCHv37kVGRoaxPCwsDEFBQUhISEBOTo6xvGPHjvD19cXmzZtNPlhRUVFwcXFBbGysSR2io6ORl5eHuLg4Y5mDgwP69euHzMxMJCUlGct37dqF7t2749y5c0hOTjaW+/j4oFOnTkhNTcWJEyeM5bbQJnd3d3Tr1q3S23Tt2jX4+/srqk3WOE/e3t5wcHDAqVOncPLkSUW0yVrnKTw8HBqNBlu2bFFMm6x1npo1awaVSmWSpa23yVrn6datWwBgzLKqtyk3NxelZdFssgc5ffo0OnToAC8vLxw/flz27gEAISEhOH36NJ5//nksXLjQWP7888/jP//5D1JSUtCgQYMi25m7MhQQEIDMzEzjaHOl/obENrFNbBPbxDaxTfbSpuzsbHh7e5dqNplFnaFx48aZLddqtbhw4QL+/PNPFBYWYsGCBXj++efLuvtSadasGY4cOYIdO3aYvB8tISEBkZGRWLp0KUaPHv3A/dja1Hq9Xo/MzEx4e3ubXAKmsmOWcjFPeZilPMxSHlvLssKn1i9ZsqTE5aGhoZg2bRrGjx9vye5Lxd/fH0eOHEHt2rVNyn19fQEAN27cqLBjW5NOp0NSUhKio6Nt4sNYlTFLuZinPMxSHmYpj5KztKgzdObMGbPlarUanp6elfIMgjZt2mDLli24cOECQkNDjeUXL14EcPfeJBEREdGDWNQZCgoKkl2PMhs+fDjmzJmDb7/91uSJ2P/973/h4OCArl27Wq9yREREZDOkzibLzs7Gnj174OzsjC5dulToE6hbtWqFcePG4bvvvoNWq0VkZCTi4+OxevVqvPnmm/D396+wY1uTSqWCu7u7TTwKvapjlnIxT3mYpTzMUh4lZ2nRAOpvvvkG33//PX799VfUrFkTAHDo0CH07dsXV65cAXB3itu909UrQmFhIWbNmoXFixfj4sWLCAoKwsSJEzF16tRS78PWBlATERHRg5Xl+92iEVDLly9HQUGBsSMEANOmTcPVq1cxduxYREdHIykpyWTKe0VwdHTE9OnTkZaWhjt37iA1NbVMHSFbpNfrcfbsWT5tWwJmKRfzlIdZysMs5VFylhZ1hlJSUtCyZUvjv69du4a4uDiMHz8e//3vf7F+/Xq0bdsWK1askFZRukun0yE5ObnIU36p7JilXMxTHmYpD7OUR8lZWtQZysrKMpmttXPnTgDAY489Zizr0qUL0tLSylc7IiIiogpmUWeoVq1aJq/a2LZtGzQaDTp37mwsu/fdT0RERERVlUWdoRYtWuC3337DP//8g5MnT2LlypXo3LkzqlevblwnLS0Nfn5+0ipKd6lUKvj4+ChyNH9lY5ZyMU95mKU8zFIeJWdp0WyyuLg49OjRw6Ts119/xYABAwDcHWTl5+eHbt26FfvC1KqCs8mIiIiUp8Jnk0VFRWHdunUYPHgwBg8ejJ9++snYEQKAxMRE+Pv7m4whIjl0Oh2OHz+uyAFslY1ZysU85WGW8jBLeZScpcUPXezXrx/69etndll4eDgOHjxocaWoeHq9HidOnEBISAg0Go21q2PTmKVczFMeZikPs5RHyVkq601rRERERGVUrtdx7N27F/v27UNWVpbZy2YqlQrvvvtueQ5BREREVKEs6gxdv34dgwYNQmJiIkoaf83OkHxqtRqBgYFQq3lRr7yYpVzMUx5mKQ+zlEfJWVo0m+zpp5/GsmXL0LVrV4wZMwZ169aFg4P5flVkZGS5K1mROJuMiIhIecry/W7RlaENGzagXbt22LZtmyKfN1CV6XQ6HD58GC1atFDcALbKxizlYp7yMEt5mKU8Ss7SomtdeXl5iIiIYEfICvR6PdLT0xX5orzKxizlYp7yMEt5mKU8Ss7Sos5QWFgY3ztGREREimBRZ2j69OlYt24ddu/eLbs+RERERJXKojFDly9fRr9+/RAZGYknn3wSrVu3LnZw0ujRo8tVQTKlVqsRGhqqyNH8lY1ZysU85WGW8jBLeZScpUWzydRqNVQqlcm0+vvHDwkhoFKpqvxjuzmbjIiISHkqfDbZ4sWLLaoYlZ9Wq8XevXvRrl27Yh9nQKXDLOVinvIwS3mYpTxKztKi1owZM0Z2PaiUhBDIyMgo8WGXVDrMUi7mKQ+zlIdZyqPkLJV344+IiIioDMp1nSstLQ0rVqxAcnIysrOz4eHhgbCwMDz55JMIDg6WVEUiIiKiimPRAGoA+Pzzz/Haa69Bq9UWuWTm6OiIjz76CC+++KKUSlYkWxtArdfrce7cOQQEBChyRH9lYpZyMU95mKU8zFIeW8uyLN/vFnWGNmzYgEcffRTe3t546aWXEBUVBT8/P1y+fBlxcXGYN28erl27hnXr1qFfv34WN6Qy2FpniIiIiB6sLN/vFnXt5s2bBy8vLxw4cABvvvkmOnTogKCgILRv3x5
vvPEG9u/fj5o1a2LevHkWNYDMEzod7uzfi0NfzsOd/XshqvhjC6o6rVaL7du3Q6vVWrsqisA85WGW8jBLeZScpUVjhg4cOIAnn3wSdevWNbs8ICAAw4cPx8qVK8tVOfof7c443Pn3PIjMq6gPoPC3n6D19kW1iS/DITzK2tWzSUII5OTkKHJmhDUwT3mYpTzMUh4lZ2lRZ+jOnTuoXr16ieu4ubnhzp07FlVKCYQQyM3NlbOzpARgzgwIAPc+2lJkXkVBzBsoeGMG0DGi3IdxdXW1m5fvCp0O+sMHUDvlCPR160CEPQKVwt7CTEREpWNRZ6hhw4ZYv349Zs2aZfbBS1qtFhs2bEDDhg3LXUFblZubCzc3t3LvRw3gaOeW8HeqBrWZjopeCFyY8SaaJh5Ced8jfOvWrQd2cpXg3qtszQFo//gNOl5lIyKyWxZ1hkaPHo1XXnkFvXv3xkcffYQ2bdoYl/3111948803ceLECXzyySfSKmqvOnu6o66zU7HL1SoVApyd0NnTHTuzciqxZpVL2pU2XmWrMBqNBh07doSGV9jKjVnKwyzlUXKWFs0m0+l0GDJkCNatWweVSgVXV1f4+vri6tWryM3NhRACAwcOxNq1a6v89LuKmk2m1+uRmZlZ7v1odu2A81cP7lTmT3oFuk6R5TqWt7d3lT1ft2/fLveVtlJdZSu4w6tsREQKUOHvJtNoNPj111+xbNkyLF26FMnJyUhPT4eHhwfat2+PMWPGYNSoURZVXiny8vJQu3btcu8n3NMdm9o0fuB6g8c+U+4rQ1X5C1zGgL3KvMqmxAGGD1JYWIjNmzejV69ecHR0tHZ1bBqzlIdZyqPkLMv1BOrRo0dj9OjRsupCZiRm5eB8fsEDr2YkKvgWmSx1nEr3n7e061FRSpxyay3MUh5mKY9Ss1TWa2erEFdXV9y6dUvOzooZ5wL8/9WMGbORLWmcS1UlY/zN5YJCqeuVxN7GCxER2TKLOkMbNmzAd999h6+++gr+/v5Fll+8eBGTJk3Cs88+i759+5a7krZIpVLJu+XUoy+0Ts7GGVDGY/jURrUJL9nFDCgpnUudDnh2JHAto/h1vH2w6ewWoJwDBKtyx5KIiExZNIC6b9++uHjxIg4dOlTsOq1atcJDDz2EDRs2lKuCFc2WXschdDro/j6IvAvn4fJQXWiat+KzccpIuzMOBTFvFLvcafocu+hcVgTDA9nc3d15ZaycmKU8zFIeW8uywl/HcejQIbRv377Eddq3b4/k5GRLdk/FUGk00LRsA+de/aBp2YYdIQs4hEfBafocqLx9TcpVPrXZEZLAxcXF2lVQDGYpD7OUR6lZWtQZun79Onx9fUtcx9vbW8rUcjKl1WoRGxur2EFslcEhPAouK36Fw9wv8HfvgXCY+wVcvv+FHaFy4mdTHmYpD7OUR8lZWjRmyMfHBydOnChxnRMnTsDLy8uiShFVNJVGA3WL1rhy/jLULVrzKhsRkR2z6MpQREQE1q9fj8OHD5tdfujQIaxbtw6RkeV7CCARERFRRbOoM/T6668DALp06YL3338fSUlJSE9PR1JSEmJiYhAeHg61Wo0333xTamWJiIiIZLNoNhkArF27FmPGjEFeXp5JuRACbm5uWLZsGQYNGiSjjhXKlmaTAXfz1Wq1cHBwsInR/FUZs5SLecrDLOVhlvLYWpYV/joOABgyZAjCw8OxZMkS7Nu3Dzdv3oSnpyfatWuHMWPGwMfHx9Jd0wPk5eXB3d3d2tVQBGYpF/MsP8MjNPL5CA1p+LmUR6lZlusJ1L6+vnjttddk1YVKQavVIi4uDtHR0Yp7N0xlY5ZyMc/y0+6MMz5cVQ2gAIDK2xfVJr7M2Y4W4udSHiVnyddxEBFZSAiB3NxcOTsr5rU7IvMqCmLeQMEbMwBJr92xhVscRJWJnSEiIgvl5ubCzc2t3PtRAzjauWXJL2Se8SaaJh6CvpzHunXrlrxXBREphEWzyaqqDz/8ECqVCs2aNbN2VSqUgwP7sLIwS7nsLU8L558U0dnTHXWdncx2hID/fyGzsxM6e5Z/rIasOtsSe/tcViSlZmnxbLKq5vz58wgNDYVKpUJwcDD++eefUm1na7PJiKjquHXrlpTBpMNqe2Fxs/oPXG/sPyex+sr1ch0rJydHytUsoqquwt9NVhW98sor6NChAx555BFrV6VC6fV6XL16FXp9eS+WE7OUyx7zlDX25nJBodT1SmJv44Xs8XNZUZScpSI6QwkJCVizZg0+++wza1elwul0OiQlJUGn01m7KjaPWcplj3m6urri1q1b5f6z6ex5oNYDHkfi7YNNZ8+X+1iurq6VE04VYY+fy4qi5Cwtuvl38eJF+Pv7y66LRXQ6HSZPnozx48ejefPmD1y/oKAABQUFxn9nZ2cDAAoLC1FYePe3LrVaDY1GA51OZ9IDNpRrtVqT++4ajQZqtbrYcsN+DQz3XO9/2V1x5Y6OjtDr9dDpdMZ9abVak3IDlUoFBweHYuteFdt0f90rq02GNhi2VUKb7i+vzDYZtr3/B6Utt+lB50mv16NatWpS2qR/YSq0M99GcRyen4pq/39Lrjxt0ul0ivvslXSeDO6tj623yVrnybD9vdtU5Tbd37aSWNQZCg4ORt++ffHss88iOjra5ANX2b7++mucPXsWW7duLdX6s2fPRkxMTJHyzZs3G39jCgwMRKtWrXD48GGkp6cb1wkNDUWjRo2wd+9eZGRkGMvDwsIQFBSEhIQE5OTkGMs7duwIX19fbN682eSDFRUVBRcXF8TGxprUITo6Gnl5eYiLizOWOTg4oF+/fsjMzERSUpKxfNeuXejevTvOnTuH5ORkY7mPjw86deqE1NRUk5fp2kKb3N3d0a1bt0pv07Vr1+Dv76+oNlnjPHl7ewMATp06hZMnTyqiTZV9nnz6PobQnVvgfOt/6+S7eeBEeA9k3LyNaK3W5tpk7fNkmFCzZcsWxbTJWufp1q1bJllW9TaV5bEXFg2gjoiIwJ9//gmVSgU/Pz+MGzcO48aNQ3BwcFl3VS7Xrl1Dw4YN8dZbb2HatGkAgK5duyIzM7PYAdTmrgwFBAQgMzPTOMCqKv+GpNVqsWvXLnTu3BnOzs5W73nLaNP9da+sNhmyDA8PR7Vq1RTRpvvLK7NNOp3O+Nm89xckW26TNc6T0OkgjhzCscRENOzQAQ4tWhufQG2rbSqp7hXdJiEEduzYgU6dOhmPZettstZ50mq1SEhIMMmyKrcpOzsb3t7epRpAbfFsspSUFHzzzTdYvnw5rl69CrVajR49euDZZ5/FwIEDK2X63QsvvICtW7fiyJEjxkvVD+oM3Y+zyYiIiJSnUmaTNWzYEB9//DHOnz+P1atXo2fPnti6dSuGDx+Ohx56CK+//jpSUlIs3f0DpaamYtGiRZgyZQouXryItLQ0pKWlIT8/H4WFhUhLS8P16+WbgloV6fV6nD17VpGj+Ssbs5SLecrDLOVhlvIoOctyD/ZxcHDAkCFDsGnTJqSlpWH69OlQq9X45JNP0LhxY0RFRWHVqlXSH/
R14cIF6PV6TJkyBfXq1TP+2bNnD1JSUlCvXj28//77Uo9ZFeh0OiQnJytyNH9lY5ZyMU95mKU8zFIeJWcp7V6WXq/H/v37sW/fPmRkZEAIgcDAQCQmJiIhIQEzZ87E2rVr0aBBAynHa9asGX755Zci5e+88w5ycnLw+eefIyQkRMqxiIiISLnK3Rk6ffo0/vvf/2Lp0qW4fPmy8UrR888/j6ioKFy+fBnz58/H/PnzjWN8ZPD29sagQYOKlBueNWRuGREREdH9LOoMFRYWYu3atfjmm2+wY8cO6PV61KtXD7NmzcLYsWPh6+trXLdOnTqYO3cusrOzsWzZMmkVt1cqlQo+Pj529xTZisAs5WKe8jBLeZilPErO0qLZZD4+Prh+/To0Gg0GDBiAf/3rX+jVq1eJ28yZMwdvvfVWlRt4xdlkREREylPhs8lcXV0RExODs2fPYu3atQ/sCAHAhAkTcObMGUsOR/fQ6XQ4fvy4IgewVTZmKRfzlIdZysMs5VFylhZ1htLS0vDOO+/Az8+v1Nt4eHggKCjIksPRPfR6PU6cOFHlrrDZImYpF/OUh1nKwyzlUXKWFnWGlHi/kIiIiOyTRQOox40b98B11Go1PDw8EBoaiv79++Ohhx6y5FBEREREFcqiztCSJUuMV4fMjb9WqVQm5ZMnT8Z7772Hd955x8JqkoFarUZgYKBVX46rFMxSLuYpD7OUh1nKo+QsLZpNdubMGUydOhV79+7Fiy++iM6dO6N27dq4cuUKEhMT8cUXX6Bdu3Z4++23cejQIcycORPnzp3DypUr8fjjj1dEOyzG2WRERETKU+GzyX766Sfs2bMHycnJeOONNxAeHo6GDRsiPDwcb7zxBg4cOIDdu3cjLi4O48ePR2JiItzc3LBgwQKLGkT/o9PpcPDgQUWO5q9szFIu5ikPs5SHWcqj5Cwt6gx9++23GD58OGrXrm12eZ06dTBs2DB88803AICHHnoI/fv3x6FDhyyvKQG4O5o/PT1dkaP5KxuzlIt5ysMs5WGW8ig5S4s6Q+fPn4eTk1OJ6zg7O+P8+fPGfwcGBiI/P9+SwxERERFVGIs6Qw899BB+/fXXYjs3+fn5+PXXX01mkF29ehU1a9a0rJZEREREFcSiztAzzzyDU6dOoUuXLli3bh2uXbsGALh27RrWrVuHLl264PTp0yZT8Hfu3ImWLVvKqbUdU6vVCA0NVeRo/srGLOVinvIwS3mYpTxKztKi2WQ6nQ5jx47F999/b5xir1arjfcRhRAYOXIkli1bBrVajStXrmDOnDno06cPevfuLbcF5cTZZERERMpT4bPJNBoNli1bhq1bt2L06NEICwtDcHAwwsLCMGbMGGzZsgXff/+9sfdYu3ZtzJ8/v8p1hGyRVqvFrl27oNVqrV0Vm8cs5WKe8jBLeZilPErO0qKHLiYkJMDDwwPdunVDt27dZNeJSiCEQEZGhtmHXVLZMEu5mKc8zFIeZimPkrO06MpQVFQUFi1aJLsuRERERJXOos6Qr68vnJ2dZdeFiIiIqNJZ1Bnq2bMn4uPjFXmprKrTaDQICwuDRqOxdlVsHrOUi3nKwyzlYZbyKDlLi2aTXbx4ER07dkSvXr0wd+5ceHl5VUTdKgVnkxERESlPhc8me+qpp+Dp6YnvvvsODz30EJo0aYKoqCjjgGrDn+7du1vUACqeVqvF9u3bFTmav7IxS7mYpzzMUh5mKY+Ss7RoNll8fLzx7wUFBTh+/DiOHz9eZD3DM4hIHiEEcnJyeItSAmYpF/OUh1nKwyzlUXKWFnWGlPiSNiIiIrJPynumNhEREVEZWHRl6F63bt1CSkoKbt++jfDwcBl1ohJoNBp07NhRkaP5KxuzlIt5ysMs5WGW8ig5S4uvDKWlpWHgwIGoWbMm2rZti6ioKOOyxMRENGnSxGRsEcmhVqvh6+uryBflVTZmKRfzlIdZysMs5VFylha1KD09HR06dEBsbCwGDhyIjh07mgyoat++PTIzM/HDDz9IqyjdVVhYiI0bN6KwsNDaVbF5zFIu5ikPs5SHWcqj5Cwt6gxNnz4dN27cwI4dO7BmzRr07NnTZLmDgwPCw8ORmJgopZJkSonTGq2FWcrFPOVhlvIwS3mUmqVFnaE//vgDgwcPRqdOnYpdJygoCBcuXLC4YkRERESVwaLO0PXr1xEcHFziOkIIFBQUWLJ7IiIiokpjUWeodu3aSE1NLXGdv//+G4GBgRZViorn4OCAqKgoODiUeyKg3WOWcjFPeZilPMxSHiVnafGLWjds2IDDhw+bXb5z505s374d0dHR5aocmefi4mLtKigGs5SLecrDLOVhlvIoNUuLOkPvvPMOXFxcEBERgQ8//BAnT54EAGzatAnvvvsu+vTpA29vb7z66qtSK0t3B6/FxsYqdhBbZWKWcjFPeZilPMxSHiVnadG1ruDgYPzxxx8YMWIE3n33XahUKggh0L9/fwghEBgYiDVr1sDPz092fYmIiIiksvjGX/v27ZGamor169djz549uH79Ojw8PNC+fXsMHDgQ1apVk1lPIiIiogpRrlFQDg4OGDx4MAYPHiyrPkRERESVSiXufXS0HcrOzkaNGjVw8+ZNeHh4WLs6DySEgFarhYODA1QqlbWrY9OYpVzMUx5mKQ+zlMfWsizL97vFV4bu3LmDX3/9Ffv27UNWVhZ0Ol2RdVQqFb799ltLD0HFyMvLg7u7u7WroQjMUi7mKQ+zlIdZyqPULC3qDJ09exY9e/bEqVOnUNKFJXaG5NNqtYiLi0N0dDQcHR2tXR2bxizlYp7yMEt5mKU8Ss7Sos7QSy+9hJMnT2LUqFEYN24c6tatq8iHMBEREZHyWdSD2b59O7p3746lS5fKrg8RERFRpbLooYt6vR6tWrWSXRcqJV6Fk4dZysU85WGW8jBLeZSapUWzyXr27AlnZ2esX7++IupUqWxtNhkRERE9WFm+3y26MjRnzhxs374da9assaiCZDm9Xo+rV69Cr9dbuyo2j1nKxTzlYZbyMEt5lJylRde7Nm7ciKioKDz++OOIjIxE69atzfa6VCoV3n333XJXkv5Hp9MhKSkJ0dHRUKst6svS/2OWcjFPeZilPMxSHiVnaVFnaMaMGca/x8fHIz4+3ux67AwRERFRVWdRZyguLk52Pcps3759WLp0KeLi4pCWloZatWqhQ4cOmDlzJho2bGjt6hEREZGNsKgzFBkZKbseZTZ37lwkJiZi2LBhaNGiBS5fvoyvvvoKrVu3xu7du9GsWTNrV7FCqFQquLu728Sj0Ks6ZikX85SHWcrDLOVRcpY2+26yXbt24ZFHHkG1atWMZampqWjevDmGDh2K77//vlT74WwyIiIi5anw2WTA3cdyz58/H+3atYOHh4fJsweSk5MxYcIEpKSkWLr7B+rUqZNJRwgAGjRogKZNm+LYsWMVdlxr0+v1OHv2rCJH81c2ZikX85SHWcrDLOVRcpYW3SbLy8tDr169sGvXLnh7e8PDwwO3b982Lq9Xrx4WL14MLy8vzJw5U1plH0QIgStXr
[... base64-encoded PNG data omitted: figure output of the idle-experiment cell (the error-bar plot produced by plot_all_error_bar below) ...]", +       "text/plain": [ +        "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "folder='survey'\n", + "do_save=False\n", + "\n", + "# IDLE TMP\n", + "\n", + "meas_calc_list = {\n", + " 'time' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:auto-para', 'TAPO'],\n", + " 'energy_consumed' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:auto-para', 'TAPO'],\n", + " 'co2_emissions' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:auto-para']\n", + "}\n", + "\n", + "exp = 'idle'\n", + "file_name = 'res-sophIA-idle.json'\n", + "name = 'idle'\n", + "\n", + "with open(os.path.join(folder, file_name), 'r') as file:\n", + " d = json.load(file)\n", + "d[exp]['training']['linux_alienware']['cuda']['TAPO:tot'] = copy.deepcopy(d[exp]['training']['linux_alienware']['cuda']['TAPO'])\n", + "new_file_name = 'res-sophIA-idle-tmp.json'\n", + "with open(os.path.join(folder, new_file_name), 'w') as file:\n", + " json.dump(d, file, indent = 4, sort_keys=True)\n", + "\n", + "# five time 10 minutes experiments, with 2 minutes breaks\n", + "# computer not in sleep mode, but done doing any computing task\n", + "\n", + "idle_power = extract_power(folder, new_file_name, meas_calc_list)\n", + "print(idle_power)\n", + "\n", + "meas_calc_list = {\n", + " 'time' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:auto-para', 'TAPO:tot'],\n", + " 'energy_consumed' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:auto-para', 'TAPO:tot'],\n", + " 'co2_emissions' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:auto-para']\n", + "}\n", + "\n", + "plot_all_error_bar(folder, new_file_name, name, exp, meas_calc_list, to_save=do_save)\n", + "# plot_evolution(folder, file_name, name, exp, meas_calc_list, to_save=do_save)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "0b356c38", + "metadata": {}, + "outputs": [], + "source": [ + "def total_to_dynamic_EPM(exp, folder, file_name, new_file_name, meas_calc_list, idle_power):\n", + " with open(os.path.join(folder, file_name), 'r') as file:\n", + " d = json.load(file)\n", + " \n", + " calc = 'TAPO'\n", + " times = d[exp]['training']['linux_alienware']['cuda'][calc]['time']\n", + " nrjs = d[exp]['training']['linux_alienware']['cuda'][calc]['energy_consumed']\n", + " dyn_nrjs = [nrj - t/3600*idle_power[calc]/1000 for t, nrj in zip(times, nrjs)]\n", + "\n", + " d[exp]['training']['linux_alienware']['cuda']['TAPO:tot'] = copy.deepcopy(d[exp]['training']['linux_alienware']['cuda']['TAPO'])\n", + " d[exp]['training']['linux_alienware']['cuda']['TAPO:dyn'] = copy.deepcopy(d[exp]['training']['linux_alienware']['cuda']['TAPO'])\n", + "\n", + " d[exp]['training']['linux_alienware']['cuda']['TAPO:tot']['energy_consumed'] = nrjs\n", + " d[exp]['training']['linux_alienware']['cuda']['TAPO:dyn']['energy_consumed'] = dyn_nrjs\n", + "\n", + " # dev meas comp ml\n", + " with open(os.path.join(folder, new_file_name), 'w') as file:\n", + " json.dump(d, file, indent = 4, sort_keys=True)\n", + "\n", + "meas_calc_list = {\n", + " 'time' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:def', 'GA:auto-para', 'TAPO'],\n", + " 'energy_consumed' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:def', 'GA:auto-para', 'FLOPS', 'TAPO'],\n", + " 'co2_emissions' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:def', 'GA:auto-para']\n", + "}\n", + "meas_calc_list_2 = {\n", + " 'time' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:def', 'GA:auto-para', 'TAPO:dyn', 'TAPO:tot'],\n", + " 'energy_consumed' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:def', 'GA:auto-para', 'FLOPS', 'TAPO:dyn', 
'TAPO:tot'],\n", + " 'co2_emissions' : ['CT:pred', 'CT:meas', 'CC:on', 'ECO2AI', 'GA:def', 'GA:auto-para']\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "7e23bc0e", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkgAAAHPCAYAAACoQyVSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAACQgUlEQVR4nO3dd1gUV9sG8Ht26QIiUkSlKCoaRLBiAwULwd41JlGjqfZoNJo3eVFfE01iSWKiiSmoscQWEwt2sIJd1KgUBQRFBUQFpMjunu8Pv924sAu7wwK7M8/vurgSz5R9bmZhDzPnzHCMMQZCCCGEEKIiqe0CCCGEEEKMDXWQCCGEEELKoA4SIYQQQkgZ1EEihBBCCCmDOkiEEEIIIWVQB4kQQgghpAzqIBFCCCGElEEdJEIIIYSQMqiDRAghhBBSBnWQCDEgjuP0/urZs2e11LJgwQJwHIcFCxYYZH9paWngOA5eXl4G2Z/Y3Lx5E7NmzULbtm1Rv359mJubo379+ujSpQvmz5+Pmzdvqq2v/H5zHIe0tDS1ZRMmTKj0fTVkyBCNdfj7+4PjOFhaWuLRo0cV1qzpdczMzODk5IQePXrghx9+QGlpqdbto6KisGDBAgwcOBANGzZU7ePu3buVfr+eP3+OL7/8Ev7+/qhTpw7q1auHnj17YseOHZVuS4ghmNV2AYQIyfjx48u1PXjwAAcPHtS6vGXLltVeF6k9MpkMc+bMwXfffQeFQgFHR0d07NgR9evXx5MnT3Dx4kWcOXMGX331Fb799ltMnTpV5317e3uje/fuGpe1a9euXNv58+dx9epVAC86IBs3bsSMGTP0ep3i4mIkJCTgxIkTOHHiBLZs2YLDhw/D2tq63HZjx47F06dPdc6jVFhYiD59+iA2NhYODg549dVXUVBQgOjoaBw/fhyzZ8/GsmXL9N4vIXphhJBqFRMTwwCwmv5xy87OZjdv3mTZ2dkG2d/z58/ZzZs32a1btwyyP7EYPXo0A8Ds7e1ZZGQkk8lkassVCgU7ePAga9euHZsxY4aqPTU1VfW+SU1NVdtm/PjxDAAbP368XrW89957DABr1KgRA8D8/PwqXL+i19myZYuqvq+++krj9m+99Rb74osv2IEDB1hWVpZq/YyMjApfd8aMGar6Xn7/Xrhwgdna2jIAbM+ePZUHJqQKqINESDWrrQ4SqX2//vorA8DMzc3ZmTNnKlz3+fPnLDY2VvVvQ3eQnj17xuzt7RkAFh0dreponDt3Tus2lb1Onz59GAAWFBSkUw26dJByc3OZhYUFA8BOnTpVbvn//vc/BoB17txZp9ckhC8ag0RILXp5nFB6ejomTZoEd3d3mJubY8KECar1/vzzT7z99tto3bo16tWrBysrKzRp0gQTJ05EYmJipft+2bp168BxHCZMmIBnz55h/vz5aNasGSwtLdGgQQOMHz8e9+7dK7e/isYgKceWAMDOnTvRvXt32Nvbo06dOujWrRuioqK0fg/u3LmDCRMmoEGDBrCyskLz5s0RERGB4uJi9OzZExzH4dixY5V+L18mk8nw448/omvXrqhbt65qv9OnT9eYraoZNGGM4fPPPwcAfPDBBwgMDKxwfXNzc3Tp0kWv19DH9u3bkZeXh9atWyMkJASjR48GAPz666+899mmTRsAwMOHDw1SI/Bi3NLz58/h4eGBbt26lVs+duxYAMCZM2eQmZlpsNclpCzqIBFiBJKTk9G2bVtERUUhMDAQgwYNgpOTk2r5qFGjsGXLFlhbWyM0NBRhYWGQSCSIjIxE+/btERsbq/drPn36FF27dsWPP/6IV155BeHh4WCMYcOGDejWrRuvsSMREREYOXIkAKBfv35o3rw5YmNjMWDAAOzatavc+jdu3ECHDh2wfv16SKVSDB48GD4+Pli+fDn69OlT4QBgbUpKShAeHo4PPvgAly9fRrdu3TBkyBCUlJRg1apVCAgIwKVLlwyWQZtr164hJSUFgOaxZzVN2RGaOHGi2n//+OMPFBUV8dpnXl4eAMDV1dUAFb5w+fJlAECHDh00Lm/atCkcHR0BAPHx8QZ7XULKqe1TWIQIXUWX2CIiIlTL3njjDVZcXKxxH3/88QcrKChQa1MoFOyHH35gAJivry9TKBQa9x0REaHWHhkZqXrNsLAw9vTpU9Wy3NxcFhAQwACwL774Qm075SUfT0/PcvUp9+fg4FDuUpKyjhYtWpTbrl27dgwAGzNmjFr2u3fvMh8fH9V+Y2JiNH5fNPn4448ZAObt7a12aer58+ds0qRJDABr0qQJKykpMUgGbZSX1ywsLFhpaanO2ykZ8hJbYmKi6lJfVlaWqr1ly5YMANuwYYPG7Sp6neLiYtakSRMGgH399dc61aHMU9EltmHDhjEAbObMmVrXadOmDQPAvv/+e51elxA+6AwSIUbA0dER33//PSwtLTUuHz16NOrUqaPWxnEcJk+ejC5duuD69evlpolXpk6dOoiMjIS9vb2qrV69epg3bx4A4MiRI3qmABYtWlTuUtL8+fNRt25dJCUlISMjQ9V+8uRJXLp0Cba2tvjhhx/Usjdq1AjLly/X+/WLi4vxww8/AABWrlypdjnQ3Nwc3333HVxdXZGamqp1urg+GSqSnZ0N4MWxNTOrngnD69ev1zrN/2W//fYbAGDQoEFwdnZWtSvPIulzma2kpATx8fEYPnw4UlNT0adPH71m3lUmPz8fAMq9319ma2sL4N8zWIRUB5rmT4gR6N27N+rWrVvhOrdu3cKBAwdw69Yt5OfnQy6XA/h3/EdiYiJeeeUVnV+zQ4cOcHNzK9feqlUrANA6VqciAwcOLNdmaWmJpk2b4vLly7h37x7c3d0BAMePHwcAvPrqq6pLJi/r378/HBwc8OTJE51f/8KFCygoKICjo6PGWmxsbDBmzBh8++23iImJUY1n4ZuhtlU0zV9JJpNh/fr1AP7tECmNGzcOn3zyCU6cOIHbt2/D29tb4z7Wr1+v2sfL3n//ffzwww+QSOhvbSI81EEixAhUdPNFuVyOqVOn4qeffgJjTOt6+v417eHhobFdeUapuLhYr/3pu0/lzQIryu7p6alXB0nZqWvSpInWdZSdAG0dQEN9X5RnanJzcyGXyyGVSnXaTh/du3fHunXrKlxn3759ePDgARo1aoSwsDC1Za6urujXrx92796N3377TTWovKyXO2J5eXm4cOECMjIy8OOPP8LPzw+TJ082SB4AsLOzAwA8e/ZM6zoFBQUAoHb2kxBDo24/IUZA0032lL799lv8+OOPc
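Note on the conversion in the cell above: total_to_dynamic_EPM turns the TAPO wall-plug readings into a dynamic-energy series by subtracting an idle baseline, via dyn_nrjs = [nrj - t/3600*idle_power[calc]/1000 ...]. The sketch below restates that arithmetic as standalone Python under the units this expression implies (run time in seconds, idle power in watts, energy in kWh); the function and variable names are illustrative only and are not part of the repository.

# Minimal sketch of the total-to-dynamic energy conversion used in total_to_dynamic_EPM.
# Assumed units (inferred from the expression above, not confirmed elsewhere):
#   runtime_s    - duration of the run in seconds
#   idle_power_w - idle baseline power of the machine in watts
#   total_kwh    - total wall-plug energy reported for the run in kWh
def dynamic_energy_kwh(total_kwh, runtime_s, idle_power_w):
    """Subtract the idle-baseline energy from a total (wall-plug) reading."""
    idle_kwh = (runtime_s / 3600.0) * (idle_power_w / 1000.0)  # s -> h, W -> kW
    return total_kwh - idle_kwh

# Example: a 2-hour run drawing 0.9 kWh in total on a machine that idles at 60 W
# keeps 0.9 - 2 * 0.060 = 0.78 kWh as dynamic energy.
print(dynamic_energy_kwh(total_kwh=0.9, runtime_s=7200, idle_power_w=60))

The cell stores both series (TAPO:tot and TAPO:dyn) back into the results JSON, presumably so that the raw wall-plug totals and the idle-corrected values can each be plotted alongside the software-based estimators listed in meas_calc_list_2.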
HV1xebNm5GWloaioiKwF7fpwGuvvQYAFXaeNKmOv/r57LPs5SBdl1UXQ31f2rdvD+DFDRmvXLlikH3yobx8VlxcjB49eqB79+5qX8obR65bt051VrIsZUds3bp1+PPPP5Gamoo5c+YAAGbOnGnQfMoOc3p6utZ1dOlcE1JVdAaJECO3bds2AMBPP/2EQYMGlVuenJxc0yUZRKNGjQCg3GM0Xnbnzh1e+0xNTdW6jnJmmXLd6tKmTRs0adIEqampWL9+vcY7W1e3+/fvq25P8OjRI5w+fVrrupmZmThw4AD69+9f6X6lUim+/PJLnD17FidOnMDs2bN5jVnTRPl9unDhgsblKSkpyM3NBQC0bdvWIK9JiCZ0BokQI6f8MPD09Cy37Pr16yY71Tk4OBgAcODAATx+/Ljc8v3792tsr0iHDh1ga2uL3Nxc7N69u9zyoqIi/PHHHwCAkJAQHlXrjuM4fPLJJwCANWvW4Ny5cxWuL5PJcObMGYPWoDwrFBgYqDrjqOlr7ty5APQbrM1xHFauXAmO43D06FHExMQYpOZ+/frBwsIC6enpGjt0mzdvBgB07twZDRs2NMhrEqIJdZAIMXLKQdM//PADFAqFqv3+/fsYN24cZDJZbZVWJcHBwfD390d+fj6mTZuG58+fq5ZlZmZi9uzZeu/TysoKU6ZMAQDMnj1b7QxUaWkpZsyYgQcPHqBJkyYYMWJE1UNU4u2338aIESNQWlqKPn36YP369eUuYzHGEB0dja5du6o6b4ainL1W2X2Yxo0bBwDYu3evavadLtq1a6e6Z1RERATPKtXVq1cPH3zwAQBg8uTJag/UvXTpEr788ksAwH/+8x+DvB4h2lAHiRAj98knn8DCwgI///wzfHx8MHr0aISHh8Pb2xslJSUYOnRobZfIC8dx2LhxIxwdHbFp0yY0bdoUo0ePxsCBA9GiRQs4Ojqq7ixtYWGh834XLlyIXr164datW2jVqhX69++PMWPGoFmzZvj5559Rv359bN++Xa99VsXmzZsxdepU5OfnY8KECXB1dUV4eDhef/11DBgwAI0aNUKvXr1w6dIltGjRwmCve/z4cdy6dQuWlpYYM2ZMhev6+vqiXbt2KC0txYYNG/R6ncWLF8PMzAwnT57E4cOH1Zb973//Q+fOnVVfSoMGDVK1aRrg/cUXX6BLly64evUqmjdvjhEjRiA8PBydO3dGQUEBZs2ahQEDBuhVJyH6og4SIUYuMDAQFy5cwKBBg/Ds2TPs3r0bt2/fxrRp0xAXF2fSM3lat26Nixcv4s0330RpaSn++usv3Lx5EzNmzMDhw4dVtzB4+a7ilbG0tMSBAwewevVq+Pv74+TJk9i1axfMzc0xbdo0XLlyRTWAuiaYm5tj1apV+OeffzBjxgw0btwYZ86cwbZt2xAbGwsPDw988sknuHnzpkFngykvlw0cOBD16tWrdH3lWSR9Hz3SvHlzTJo0CUD5s0i3b9/G2bNnVV9Kly9fVrXduHGj3D5tbGxw7NgxLFmyBI0aNUJUVBTi4uLQpUsXbNu2jdc9sgjRF8f0nfpCCCE1IDU1Fc2aNYOdnR1yc3PpXjuEkBpFv3EIIbXm2bNnuH79ern2O3fu4PXXX4dCocD48eOpc0QIqXF0BokQUmvS0tLQpEkTeHt7o0WLFrC3t0d6ejouXbqEkpIS+Pv748SJEyZ9GZEQYpqog0QIqTUFBQVYuHAhoqOjkZ6ejidPnsDGxgY+Pj4YPnw4pk2bBhsbm9oukxAiQtRBIoQQQggpgy7sE0IIIYSUQY8a0UKhUCAzMxN2dna18jwoQgghhBgWYwz5+flo2LBhpZM/qIOkRWZmJtzd3Wu7DEIIIYQYWEZGBho3blzhOtRB0sLOzg7Ai2+iMc2gUfZ+xXRmizJTZqGizJRZqIw1c15eHtzd3VWf8RWhDpIWygNqb29vVB2k0tJSHDt2DP369YO5uXltl1MjKDNlFirKTJmFytgz69Jpo0HahBBCCCFlUAeJEEIIIaQM6iCZIDMz8V0ZpcziQJnFgTKLg6lnphtFapGXl4e6devi6dOnRjUGiRBCCCH86PPZTmeQTIxCoUBWVhYUCkVtl1JjKLM4UGZxoMziIITM1EEyMXK5HHFxcZDL5bVdSo2hzOJAmcWBMouDEDJTB4kQQgghpAzqIBFCCCGElFGlIeZ37tzB3bt3kZOTAxsbGzg7O6Nly5awsrIyVH2kDI7jjO7OpNWNMosDZRYHyiwOQsis9yy2mJgYrFu3DkePHsX9+/fLLTc3N0eHDh0wdOhQTJgwAfXr1zdYsTWJZrERQgghwqLPZ7vOHaRt27YhIiICSUlJYIzB3d0dHTp0gKurKxwdHVFUVITc3FwkJibi8uXLKCkpgaWlJd544w0sWrQIbm5uBglXU4y1g6RQKJCRkQF3d/dKn0QsFJSZMgsVZabMQmWsmfX5bNfpElvnzp1x7tw5tGvXDsuXL8fIkSPRqFEjreuXlpbixIkT2LhxI7Zt24Y//vgDGzZswNChQ/VLQsqRy+WIj49Hw4YNjepNV50oM2U2VowxFBYW8ttWJsOz82dw+9RJ1O3dG+Zt2gFSqd77sbGxManLGKZ4nKuKMptmZp06SBYWFjhy5AhCQ0N12qm5uTl69eqFXr16YeXKlVi+fDnS09OrVCghhBibwsJC2Nra6r3dIOd6+KqFBxpbWSIQAE4dxt3iEsxNSsfu7Md67augoAB16tTRuwZCSMV06iCdOHGC9ws4ODjgf//7H+/tCSFESAY518NGv2bl2htaWmCjXzO8ce2W3p0kQojhmfaDUkSI4zg4Ozub1Cn1qqLM4mCKmW1sbFBQUKD7BnI58M5Y4FF2uUWS/8+9ObQrsHazzpfbbGxsdH99I2CKx7mqKLNpomexaWGsg7QJIaZLHn8RxR9NrnQ9q2WrIQ1oXwMVESIuBh+krUl2djYiIyNx/vx5PHnyROPtxDmOw9GjR/m+BNFALpcjOTkZzZs3h5THgE5TRJkps1Cw3ByDrmeKxHCcy6LMppmZVwfp6tWrCA0NxePHj1HRCShTPrVmrBQKBRITE+Ht7W2ybzp9UWbKbKz0nsVmo9uA7hIbW5Q8e6bbLk1sFpspHueqosymmZlXB2n27NnIzc3Fp59+ikmTJqFx48Ym+w0ghBC+9J3FJgFwo5s/GlpaqMYcvUzBGO6VPIdvl+7Q9RnoNIuNkOrB6+YEcXFxGDJkCBYtWgRPT0/qHBFCiA4UAOYmvbjliaLM2Xflvz9OSte5c0QIqT68ziBZWFjA29vb0LUQHUgkEnh4eJjsjbf4oMziYIqZ9Z7FphR3AuznH9Rms0mcXYC3p2Bzl2C9azAlpnicq4oymyZes9iGDh2KJ0+eICYmpjpqMgo0i40QUp2YXA7FtXiw3Bxwjk6Q+AWAo7PxhFQrfT7beXXtli1bhn/++QfLli3jVSDhTy6X4/LlyxpnDQoVZRYHsWXmpFLALwDX6rkAIuocie04A5TZVOl0iW3ixInl2lq3bo2PP/4YP/74IwICAjT2xDiOw6+/
/lr1KomKQqFAeno6WrduLZqxX5SZMgsVZabMQiWEzDp1kNatW6d1WUpKClJSUjQuow4SIYQQQkyRTh2k1NTU6q6DEEIIIcRo6NRB8vT0rO46iI4kEgl8fHxMemaAviizOFBmcaDM4iCEzDrPYouMjERoaKhoOks0i40QQggRlmqZxTZp0iQ0bdoUTZs2xaRJk7Bx40bcu3evysUS/chkMsTGxkImk9V2KTWGMosDZRYHyiwOQsiscwdp+vTp8PPzw507dxAZGYnx48fDw8MDLVq0wHvvvYetW7fi4cOH1VkrwYtnP2VnZ1f4DDyhocziQJnFgTKLgxAy63wn7W+++QYA8PjxYxw/fhwxMTE4duwY/vnnH9y6dQu//PILAKBly5YICQlBSEgIevbsifr161dL4YQQQggh1UXvR43Uq1cPQ4YMwZAhQwAAubm5OHbsmKrDdOPGDSQkJGDNmjWQSCQoLS01dM2EEEIIIdWqysPLHR0dMWzYMKxatQoXL17E1q1b4evrC8YYFAp65KKhSaVSBAQEmOyNt/igzOJAmcWBMouDEDLzehabkkwmw9mzZxETE4OYmBjExcWhpKQEjDG0bNkSPXr0wJo1awxZb42hWWyEEEKIsFTbs9gUCgXOnj2LpUuXIiwsDA4ODggODkZERASysrIwadIkbN26FQ8ePMCNGzdMtnNkzGQyGaKjo016ZoC+KLM4UGZxoMziIITMOo9B6tevH06fPo2CggJwHAc/Pz+8/fbb6NGjB4KDg2kwdg1hjCE/P9+kZwboizKLA2UWB8osDkLIrHMH6cCBA5BIJBg2bBj++9//ws/PrzrrIoQQQgipNTpfYhs0aBDq1q2LnTt3IiAgAM2aNcM777yDTZs24e7du9VZIyGEEEJIjdJrkDZjDPHx8aop/SdPnsTTp0/BcRy8vLzQs2dP1Ze7u3t11l3tjHWQtkKhQE5ODpycnEz6GTf6oMyUWagoM2UWKmPNrM9ne5VmsSkUCly+fBnR0dE4duwYTp06hfz8fFWHKSQkRHUDSVNjrB0kQgghhPBTbbPYym0skaB9+/aYM2cO9u3bh8ePH2P79u3w9fVFamoqIiMjq7J7okFpaSn27dsnqhtwUmZxoMziQJnFQQiZ9b6Tdln3799XXXKLiYlBSkqKapkp3yDKmJnytEm+KLM4UGZxoMziYOqZ9e4gPXz4UO3RIsnJyQBejE/iOA7+/v6qZ7EFBwcbvGBCCCGEkOqmcwdp8uTJOHbsGBITEwH82yFq3bo1evbsiZCQEPTo0QP16tWrtmIJIYQQQmqCzoO0laPQW7VqpTpD1LNnT8HeINJYB2krb75lZ2cHjuNqu5waQZkps1BRZsosVMaaWZ/Pdp3PIG3ZsgU9e/aEq6trlQskVWNtbV3bJdQ4yiwOlFkcKLM4mHpmnWexjR49mjpHRkAmkyEqKsrkB7/pgzKLA2UWB8osDkLIrFMHKT09vcovdO/evSrvgxBCCCGkJujUQWrevDmmTJmC1NRUvXZeWlqKLVu2wNfXF7/++iuvAgkhhBBCappOHaQvv/wSW7duRbNmzdCjRw+sWrUK58+f13gDqLt372Lnzp14//334ebmhjfeeAOenp4YO3asTgWdP38eU6dOha+vL+rUqQMPDw+MGjUKSUlJOm3/5MkTvPvuu3B2dkadOnUQEhKCS5cu6bQtIYQQQgigxyy2J0+eYMWKFfj1119x//59cBwHiUQCBwcHODg4oLi4GLm5uSguLn6xY45DWFgYZs+ejdDQUJ0LGjFiBE6fPo2RI0eiTZs2ePDgAb7//nsUFBTgzJkzaN26tdZtFQoFgoKCcOXKFcyZMwdOTk5YvXo1MjIycPHiRTRv3lznOox5FptMJoOZmZlRzQyoTpSZMgsVZabMQmWsmav1WWxyuRz79+/H0aNHERsbi7t37+LRo0ewtraGs7Mz/Pz80KNHDwwePBienp56Fx8bG4sOHTrAwsJC1ZacnAw/Pz+MGDECGzdu1Lrttm3bMHr0aGzfvh0jRowAAGRnZ6NFixYIDw/H5s2bda7DmDtIxjh1sjpRZsosVJSZMguVsWau1mexSaVSDBgwACtXrsTZs2dx7949FBcX4/Hjx0hKSsLOnTsxffp0Xp0jAOjatata5wh4MQbK19cXN2/erHDbHTt2wNXVFcOGDVO1OTs7Y9SoUfj7779RUlLCqyZjIpPJEBMTY9IzA/RFmcWBMosDZRYHIWSu8rPYagJjDA8fPoSvr2+F612+fBnt2rVT3dRSqVOnTli7di2SkpLg5+encduSkhK1DlReXh6AFwPNlWOtJBIJpFIp5HI5FAqFal1lu0wmw8sn5KRSKSQSidb2smO4zMxeHI6yb6iX25XblJaWwtzcHAqFAnK5XLUux3EwMzPT2q6t9trM9DJNmZTrKBQKtf2bciZdjhMAo3rvGSJTRcdJWZNcLoe5ubkgMpVtL1u7cp2ytZhyJl2Pk/K/QsqkrXblcsaY6tKTqWeqrF1J0+/s2sykz8NzTaKDtGnTJty7dw+LFi2qcL379+9rfP6bm5sbACAzM1NrB2nJkiVYuHBhufZDhw7BxsYGAODh4YG2bdvi6tWrarc+8PHxQcuWLXHu3DlkZ2er2gMCAuDp6YkTJ04gPz9f1d6lSxe4uLjg0KFDam+qkJAQWFtbIyoqSq2Gfv36oaioCDExMaq2mJgY9O/fHzk5OYiLi1O129nZITQ0FBkZGYiPj1e1Ozs7o2vXrkhOTlY9LsbYMpmZmWnMZGtrC+DFrSKuXbsmiEyVHafbt28DAA4fPiyYTLoep9u3b8PX11dQmbQdJ+Xvo9jYWBQUFAgiU2XHSVmj8r0thEy6HieZTIbi4mJBZdJ2nPr27Qvg3+NsLJmUJz90ofcYpJqWkJCAwMBA+Pr64uTJk5BKpVrXlUqleO+997B69Wq19ujoaPTq1Qu7du3CkCFDNG6r6QySu7s7cnJyVNcpjaGHXlpaiujoaISGhsLGxkZwf3VoO4MUHR2N3r17q50dNOVMlR2n4uJiHDlyBKGhoTA3NxdEJl3OICmPs5WVlSAylW3XdAZJeZyVdZl6psqOU2Fhoep3mLm5uSAy6XIGKTo6Gn379lWdHTX1TJW1A8DBgwdVx9lYMuXl5cHJyal6BmnXpAcPHqBbt24oLS3FmTNn0LBhwwrXt7W1xejRo8vdcykqKgr9+/fHgQMHEBYWptNrG+sgbUIIIYTwU62DtGvK06dPER4ejidPnuDAgQOVdo6AF5fS7t+/X65d2abLPoydQqFAVlaWWm9a6CizOFBmcaDM4iCEzEbZQSouLsbAgQORlJSEvXv34pVXXtFpu4CAAFy6dKncATl79ixsbGzQokWL6ii3RsnlcsTFxamdnhQ6yiwOlFkcKLM4CCGz0XWQ5HI5Ro8ejbi4OGzfvh1dunTRuN79+/eRkJCgdp10xIgRePjwIf78809VW05ODrZv346BAwfC0tKy2usnhBBCiOkzullss2fPxu7duzFw4EDk5uaWuzHkG2+8AQCYP38+1q9fj9T
UVHh5eQF40UHq3Lkz3nrrLdy4cUN1J225XK5xhhohhBBCiCZV6iCdO3cO58+fx5MnTzSeRuM4Dp999ple+1RO/duzZw/27NlTbrmyg6SJVCpFVFQU5syZg++++w5FRUXo2LEj1q1bBx8fH73qMFYcxxndnUmrG2UWB8osDpRZHISQmdcsttzcXAwZMgSnT59GRZtzHGey1x9pFhshhBAiLPp8tvM6gzRr1iycOnUKPXv2xPjx49G4cWO1e3iQ6qNQKJCRkQF3d/dydwwXKspMmYWKMlNmoRJCZl69mr1796JTp044evSoSZ8+M0VyuRzx8fFo2LChyb7p9EWZKbNQUWbKLFRCyMyr6qKiIgQHB1PniBBCCCGCxKuDFBAQgLS0NAOXQgghhBBiHHh1kCIiIrB7926cOXPG0PWQSnAcB2dnZ1GdvaPM4kCZxYEyi4MQMus0i23Dhg3l2v7++2/s3bsXr7/+Otq1a6d1NPi4ceOqXmUtoFlshBBCiLDo89muUwdJIpGU6wWW3UzTcprmb3hyuRzJyclo3rw5pFJpbZdTIygzZRYqykyZhcpYMxt8mn9kZKRBCiNVp1AokJiYCG9vb6N601UnykyZhYoyU2ahEkJmnTpI48ePr+46CCGEEEKMhmnenIAQQgghpBrx6iDt3bsXw4YNQ2ZmpsblmZmZGDZsGPbv31+l4kh5EokEHh4eJnvjLT4oszhQZnGgzOIghMy8nsUWHh6OzMxMXLlyRes6bdu2RaNGjbB3794qFVhbjHWQNiGEEEL40eeznVfX7sqVKwgMDKxwncDAQMTHx/PZPamAXC7H5cuXTXZ2IB+UWRwoszhQZnEQQmZeHaTc3Fy4uLhUuI6TkxNycnJ4FUW0UygUSE9Ph0KhqO1SagxlFgfKLA6UWRyEkJlXB8nZ2RmJiYkVrpOYmAhHR0deRRFCCCGE1CZeHaTg4GDs2bMHV69e1bj8ypUr2L17N3r06FGl4gghhBBCagOvDtLHH38MAOjevTsWLVqEuLg4pKenIy4uDgsXLkRQUBAkEgnmz59v0GLJi5kBPj4+Jj0zQF+UWRwoszhQZnEQQmZes9gAYOfOnRg/fjyKiorU2hljsLW1xYYNGzBkyBBD1FgraBYbIYQQIizVPosNAIYPH46UlBQsWbIEw4YNQ69evTB8+HB89dVXuH37tkl3joyZTCZDbGwsZDJZbZdSYyizOFBmcaDM4iCEzDo9akQbFxcXzJ0711C1EB0wxpCdnV3uYcFCRpnFgTKLA2UWByFkNt2Lg4QQQggh1aRKHaRNmzahT58+cHZ2hqWlJZydndGnTx9s3rzZUPURQgghhNQ4XpfY5HI5Ro0ahb/++guMMVhZWaFhw4Z4+PAhjh49iujoaOzcuRPbt2836RHsxkgqlSIgIABSqbS2S6kxlFkcKLM4UGZxEEJmXr2X7777Drt27UK3bt1w+vRpFBYWIjU1FYWFhYiNjUX37t3x119/YdWqVYauV/QkEgk8PT1F1fGkzOJAmcWBMouDEDLzqnz9+vVo0aIFjh49ii5duqgt69y5M44cOYIWLVogMjLSIEWSf8lkMkRHR5v0zAB9UWZxoMziQJnFQQiZeXWQkpKSMGjQIJibm2tcbm5ujoEDByIpKalKxZHyGGPIz8836ZkB+qLM4kCZxYEyi4MQMvPqIFlYWODZs2cVrvPs2TNYWFjwKooQQgghpDbx6iC1bdsW27ZtQ2Zmpsbl9+/fx7Zt29CuXbsqFUcIIYQQUht4PWpkz549GDx4MBo0aIDZs2ejR48ecHV1xcOHD3Hs2DGsWLECDx8+xN9//40BAwZUR93VzlgfNaJQKJCTkwMnJyeTHvymD8pMmYWKMlNmoTLWzPp8tvN+FtuKFSswb948yOVytXbGGMzMzPDll1/iww8/5LNro2CsHSRCCCGE8FMjz2KbNWsWEhISsGDBAgwZMgShoaEYMmQIFi1ahISEBJPuHBmz0tJS7Nu3D6WlpbVdSo2hzOJAmcWBMouDEDJX6VlsTZs2xWeffWaoWoiOTHnaJF+UWRwoszhQZnEw9cwGuTAok8nw+PFjk/9mEEIIIYQAVeggyeVyrFy5Ev7+/rCysoKTkxOsrKzg7++Pb775hjpLhBBCCDFZvAZpFxQUICwsDGfOnIFEIoG7u7tqFltGRgYUCgW6dOmCgwcPok6dOtVRd7Uz1kHayptv2dnZgeO42i6nRlBmyixUlJkyC5WxZq72Qdr//e9/ERcXh9deew23b99GSkoK4uLikJKSgtu3b2PMmDGIjY3Ff//7X14BSMWsra1ru4QaR5nFgTKLA2UWB1PPzKuDtG3bNnTo0AEbN26Eh4eH2jIPDw9s2rQJ7du3x9atWw1SJPmXTCZDVFSUqC5hUmZxoMziQJnFQQiZeXWQHj16hN69e1e4Tu/evZGbm8urKEIIIYSQ2sSrg9S8eXNkZWVVuE52djaaNWvGqyhCCCGEkNrEq4M0Y8YMbN26FdevX9e4/Nq1a/jjjz8wc+bMqtRGCCGEEFIreM1iO3HiBJYvX45Dhw5h/Pjx6N69u2oW28mTJ7FhwwaEhYVh1qxZ5bYNDg42SOHVzZhnsclkMpiZmRnVzIDqRJkps1BRZsosVMaaudqfxSaRSMBxHJSbvhxeU9vLyj67zVgZcwfJGKdOVifKTJmFijJTZqEy1sz6fLbzetTIf//7X6MKLCYymQwxMTHo168fzM3Na7ucGkGZKbNQUWbKLFRCyMyrg7RgwQIDl0EIIYQQYjwM8iw2QgghhBAh4XUGSeny5cvYsmULEhISUFhYiCNHjgAA7ty5g7Nnz6J3795wdHQ0SKHkX2ZmVTpsJokyiwNlFgfKLA6mnpnXIG0AmDt3LpYvX642KFs5ADstLQ3NmjXD8uXLMWPGDMNVW4OMdZA2IYQQQvip9mexRUZGYtmyZRgwYACuXr2K+fPnqy338vJCp06dsHv3bj67JxVQKBTIysqCQqGo7VJqDGUWB8osDpRZHISQmVcHafXq1WjVqhV27tyJ1q1bw8LCotw6LVu2RHJycpULJOrkcjni4uJM5nYJhkCZxYEyiwNlFgchZObVQbpx4wb69OlT4fVFV1fXSh9HQgghhBBijHh1kMzMzPD8+fMK18nMzIStrS2vogghhBBCahOvDpKfnx+io6O1njpTzmhr3759lYoj5XEcZ3R3Jq1ulFkcKLM4UGZxEEJmXh2kiRMnIikpCe+//z5KSkrUluXl5WHChAl48OAB3nnnHYMUSf5lZmaG0NBQk58+qQ/KLA6UWRwoszgIITPvDtKYMWPw66+/wtnZGb/++isAoFOnTmjUqBF27NiB8ePHY8SIEQYtlryYGXDnzh2TnhmgL8osDpRZHCizOAghM+87aW/evBk//fQTmjRpgnv37oExhgsXLsDDwwNr1qzBb7/9Zsg6yf+Ty+WIj4836ZkB+qLM4kCZxYEyi4MQMlfpUSPvvPMOrly5goKCAty9exd5eXm4fv063nvvPd77LCgoQEREBF599VU4OjqC4zisW7dOp23XrV
sHjuM0fj148IB3TYQQQggRF4NcHLS2toa1tbUhdoWcnBwsWrQIHh4e8Pf3x7Fjx/Tex6JFi9CkSRO1NgcHB4PURwghhBDhM7rRU25ubrh//z4aNGiACxcuoGPHjnrvIzw8HB06dKiG6mofx3FwdnY26ZkB+qLM4kCZxYEyi4MQMhtdB8nS0hINGjSo8n7y8/NhY2MDqVRqgKqMh5mZGbp27VrbZdQoyiwOlFkcKLM4CCGz0XWQDCEkJAQFBQWwsLBAWFgYli9fjubNm1e4TUlJidotC/Ly8gAApaWlKC0tBQBIJBJIpVLI5XK1kfnKdplMhpef/SuVSiGRSLS2K/erpJwOKZPJtLbL5XLcvn0b3t7esLKygkKhUBsEx3EczMzMtLZrq702M73M3Ny8XO0KhQKpqanw9vZWW9eUM1V2nJ4/f47k5GR4e3ur6jD1TJUdJ+V7u3nz5rCwsBBEprLtZWsHgNu3b6NJkyaQSP4dEmrKmSo7TsXFxarfYVKpVBCZKjtOyve2j4+Pav+mnqmydolEgqSkJDRp0kR1osIYMpXNVRFBdZBsbGwwYcIEhISEwN7eHhcvXsSKFSvQtWtXXLp0Ce7u7lq3XbJkCRYuXFiu/dChQ7CxsQEAeHh4oG3btrh69SrS09NV6/j4+KBly5Y4d+4csrOzVe0BAQHw9PTEiRMnkJ+fr2rv0qULXFxccOjQIbU3VUhICKytrREVFaVWQ79+/VBUVISYmBhVW1paGvr374+cnBzExcWp2u3s7BAaGoqMjAzEx8er2p2dndG1a1ckJycjMTFR1W5MmczMzDRmsrW1VXV4r127JohMuhynW7du4datW4LKpMtxAgBfX19BZdJ2nPz8/JCYmIh79+6hoKBAEJkqO05Hjx6FTCZTvbeFkEnX49SkSRPIZDJBZdJ2nPr27YukpCQkJSUZVSblyQ9dcKzsnzNGRDkGKTIyEhMmTOC1j1OnTiE4OBjvvvsufvzxR63raTqD5O7ujpycHNjb2wMwjh56aWkpDh8+jD59+sDGxkZwf3Vo+ktKJpPh0KFDePXVV9X+yjblTJUdp+LiYhw8eBB9+vSBubm5IDJVdpyU7+2wsDBYWVkJIlPZ9rK1KxQKHDhwAH379lW7oZ4pZ6rsOBUWFqp+h5mbmwsiU2XHSfneDg8Ph7m5uSAyVdYOAFFRUarjbCyZ8vLy4OTkhKdPn6o+27XhdQZpw4YNcHV1RVhYGJ/Na1T37t0RGBiII0eOVLiepaUlLC0ty7Wbm5urDq6SVCrVOLZJ2x1DtbWX3a8+7cr/l0gkap0GJW3t2mo3hkxKFWXStL6pZ9J2nJT7enl/pp5Jl+Ok/H8hZVIqW7vyQ8jMzExjPaaYqbJ2ZY1l39tCyFRZu/K2M0LKpK1d+d7W9Blam5m01a8Jr/sgTZo0CQcOHOCzaa1wd3dHbm5ubZdhEBKJBB4eHhrfXEJFmcWBMosDZRYHIWTmdQbJzc1N4+k0Y5WSkgJnZ+faLsMgpFIp2rZtW9tl1CjKLA6UWRwoszgIITOvrt2gQYNw+PDhcg+qrUn3799HQkKC2nXSlwdrKUVFReHixYt49dVXa7K8aiOXy3H58mWTvn27viizOFBmcaDM4iCEzLw6SJ9//jnq1KmDYcOG4fr164auCd9//z0WL16sep7bnj17sHjxYixevBhPnz4FAMyfPx+tWrXCvXv3VNt17doVo0aNwldffYWffvoJ7733HgYPHgx3d3d88sknBq+zNigUCqSnp5v0AwD1RZnFgTKLA2UWByFk5nWJrW3btigpKUF8fDwOHDgAKysruLi4lLtjJsdxuH37tt77X7ZsGe7cuaP6959//ok///wTAPDGG2+gbt26GrcbPXo09u3bh0OHDqGwsBBubm545513EBERAVdXV73rIIQQQog48eogKRQKWFhYwMPDQ6297B0D+N5BIC0trdJ11q1bV+4htsqzTIQQQgghVcGrg6RLB4ZUD4lEAh8fH5OeGaAvyiwOlFkcKLM4CCGzUd8osjbl5eWhbt26Ot1MihBCCCHGT5/P9ip37W7cuIE///wTv//+e1V3RXQgk8kQGxtrUrdZqCrKLA6UWRwoszgIITPvDtL58+cREBAAPz8/jBw5Uu1RICdOnICNjQ12795tiBrJSxhjyM7O5j2+yxRRZnGgzOJAmcVBCJl5dZCuX7+O0NBQpKam4sMPP0R4eLja8qCgIDg5OWH79u0GKZIQQgghpCbx6iBFREQAAC5evIhly5ahY8eOass5jkOXLl1w/vz5qldICCGEEFLDeHWQjh8/juHDh6NZs2Za1/Hw8MD9+/d5F0Y0k0qlCAgI0PjgPqGizOJAmcWBMouDEDLzmuafn58PFxeXCtcpKioy6VuMGyuJRAJPT8/aLqNGUWZxoMziQJnFQQiZeZ1Bcnd3x7Vr1ypc59KlS/D29uZVFNFOJpMhOjrapGcG6IsyiwNlFgfKLA5CyMyrgzRgwAAcOnQIR44c0bh827ZtOHPmDIYMGVKV2ogGjDHk5+eb9MwAfVFmcaDM4kCZxUEImXldYvvkk0+wY8cO9OvXD+PHj8eDBw8AAKtXr0ZcXBy2bNkCLy8vzJo1y6DFEkIIIYTUBF4dJGdnZxw/fhxvvvkmfv31V1X71KlTAQCBgYHYsmWL1ofKEkIIIYQYsyo/aiQ+Ph5nzpxBbm4u7O3tERgYWG7avyky1keNKBQK5OTkwMnJyaSfcaMPykyZhYoyU2ahMtbM+ny207PYtDDWDhIhhBBC+KnRZ7E9evQI0dHR2LVrF6Kjo/Ho0aOq7pJUoLS0FPv27UNpaWltl1JjKLM4UGZxoMziIITMvMYgAUBaWhpmzJiBffv2qY1S5zgOAwYMwDfffAMvLy9D1EjKMOVpk3xRZnGgzOJAmcXB1DPz6iDdvn0b3bp1Q1ZWFpo3b45u3brB1dUVDx8+RGxsLHbv3o0zZ84gNjYWTZs2NXTNhBBCCCHVilcH6eOPP0Z2djZ+/PFHvPPOO+A4TrWMMYa1a9di8uTJ+Pjjj+mBtYQQQggxObwGaderVw89e/bErl27tK4zePBgnDhxAo8fP65SgbXFWAdpK2++ZWdnp9YxFTLKTJmFijJTZqEy1szVPkhbLpfD19e3wnVat25Nz2KrJtbW1rVdQo2jzOJAmcWBMouDqWfm1UFq164drl+/XuE6169fR4cOHXgVRbSTyWSIiooy+cFv+qDM4kCZxYEyi4MQMvPqIH3++efYv38/fvnlF43L165di4MHD2Lx4sVVKo4QQgghpDbwGqR99OhRhISE4L333sPy5cvVZrGdPn0aSUlJCAsLw5EjR9QeaMtxHD777DODFU8IIYQQUh14dZAWLFig+v/ExEQkJiaWW+fAgQM4cOCAWht1kAghhBBiCnjNYjt+/DjvF+zRowfvbWuSMc9ik8lkMDMzM6qZAdWJMlNmoaLMlFmojDWzPp/tvM4gmUonR6iKiopgZ2dX22XUKMosDpRZHCizOJh6ZuN5xC7RiUwmQ0xMjEnPDNAXZRYHyiwOlFkch
JCZOkiEEEIIIWVQB4kQQgghpAzqIJkgMzNeQ8dMGmUWB8osDpRZHEw9M69ZbGJgrLPYCCGEEMJPtT+LjdQehUKBrKwsKBSK2i6lxlBmcaDM4kCZxUEImamDZGLkcjni4uJE9SBgyiwOlFkcKLM4CCEzdZAIIYQQQsrQaQSVRCLhdSdMjuNM+h4IhBBCCBEnnTpIwcHB5TpIjx8/xtWrVyGVSuHu7q56WG1GRgbkcjnatGmDevXqVUvRYsZxHOzs7Izq1u3VjTKLA2UWB8osDkLIzGsW2927d9GtWzcEBQXhiy++gIeHh2pZeno65s+fj9OnT+PUqVNo3LixQQuuKTSLjRBCCBGWap/F9tFHH8HNzQ0bN25U6xwBgIeHBzZt2oQGDRpgzpw5fHZPKqBQKHDnzh2TnhmgL8osDpRZHCizOAghM68O0pEjR9CrV68K1wkNDcWRI0d4FUW0k8vliI+PN+mZAfqizOJAmcWBMouDEDLz6iAVFxfj/v37Fa6TmZmJoqIiXkURQgghhNQmXh2k9u3b448//kBcXJzG5bGxsdi6dSs6duxYpeIIIYQQQmoDrwelfP755+jVqxeCgoIwcOBAdO/eHS4uLsjKysLJkyexd+9emJmZYfHixYauV/Q4joOzs7NJzwzQF2UWB8osDpRZHISQmfez2I4ePYp3330XqampL3bEcVDuqkmTJli7dm2l45SMGc1iI4QQQoRFn8923o/a7dWrF27duoVTp07hypUrePr0KerWrQt/f390797dpHuNxkwulyM5ORnNmzeHVCqt7XJqBGWmzEJFmSmzUAkhM+8OEvDirFFQUBCCgoIMVQ+phEKhQGJiIry9vU32TacvykyZhYoyU2ahEkLmKnWQAODGjRtISEjAs2fP8OabbxqiJkIIIYSQWsX7YbXnz59HQEAA/Pz8MHLkSEyYMEG17MSJE7CxscHu3bsNUSMhhBBCSI3i1UG6fv06QkNDkZqaig8//BDh4eFqy4OCguDk5ITt27cbpEjyL4lEAg8PD0gkvPu2JocyiwNlFgfKLA5CyMxrFtuIESNw8OBBXL58Gc2aNcPChQuxaNEitTtmjh49GleuXEFCQoJBC64pNIuNEEIIEZZqfxbb8ePHMXz4cDRr1kzrOh4eHpXebZvoTy6X4/LlyyZ9+3Z9UWZxoMziQJnFQQiZeXWQ8vPz4eLiUuE6RUVFJv2NMVYKhQLp6ekm/QBAfVFmcaDM4kCZxUEImXl1kNzd3XHt2rUK17l06RK8vb15FUUIIYQQUpt4dZAGDBiAQ4cO4ciRIxqXb9u2DWfOnMGQIUOqUhshhBBCSK3gdR+kTz75BDt27EC/fv0wfvx4PHjwAACwevVqxMXFYcuWLfDy8sKsWbMMWix5MTPAx8fHpGcG6IsyiwNlFgfKLA5CyMz7WWwpKSl48803ERcXV25ZYGCgqpOkr4KCAnz99dc4e/Yszp07h8ePHyMyMlLtPksVefLkCebOnYtdu3ahsLAQnTp1wvLly9GuXTu96qBZbIQQQoiwVPssNgBo2rQpTp8+jUuXLmH16tVYvHgxvvvuO5w9exZxcXG8OkcAkJOTg0WLFuHmzZvw9/fXa1uFQoH+/ftj8+bNmDp1Kr766itkZWWhZ8+eSE5O5lWPsZHJZIiNjYVMJqvtUmoMZRYHyiwOlFkchJC5yo8aCQgIQEBAgAFKecHNzQ33799HgwYNcOHCBXTs2FHnbXfs2IHY2Fhs374dI0aMAACMGjUKLVq0QEREBDZv3mywOmsLYwzZ2dngeeLPJFFmcaDM4kCZxUEImY3u4qClpSUaNGjAa9sdO3bA1dUVw4YNU7U5Oztj1KhR+Pvvv1FSUmKoMgkhhBAiYLzPIOXn5+PXX3/FlStXkJmZidLS0nLrcByHo0ePVqlAfVy+fBnt2rUrNyisU6dOWLt2LZKSkuDn56dx25KSErUOVF5eHgCgtLRUlU0ikUAqlUIul6vd20HZLpPJ1HrLUqkUEolEa3vZ75mZ2YvDUfaU5Mvtym1KS0thbm4OhUKhdr8pjuNgZmamtV1b7bWZ6WWaMinXUSgUavs35Uy6HCcARvXeM0Smio6Tsia5XA5zc3NBZCrbXrZ25TplazHlTLoeJ+V/hZRJW+3K5YwxMMYEkamydiVNv7NrM5Omvoo2vDpI58+fR3h4OB4/flzh6TOO4/jsnrf79+8jODi4XLubmxsAIDMzU2sHacmSJVi4cGG59kOHDsHGxgbAi7uDt23bFlevXkV6erpqHR8fH7Rs2RLnzp1Ddna2qj0gIACenp44ceIE8vPzVe1dunSBi4sLDh06pPamCgkJgbW1NaKiotRq6NevH4qKihATE6Nqi4mJQf/+/ZGTk6M2UN7Ozg6hoaHIyMhAfHy8qt3Z2Rldu3ZFcnIyEhMTVe3GlMnMzExjJltbWwQEBCAzMxNXr14VRKbKjlNqaioA4PDhw4LJpOtxSk1NRatWrQSVSdtxatOmDQICAhAbG4uCggJBZKrsOClrVL63hZBJ1+PEGEN+fr6gMmk7TuHh4WjatKnqOBtLJuXJD13wmsXWtWtXnDt3DkuWLMFrr70GNzc3SKVSfXdTKeUYJF1nsUmlUrz33ntYvXq1Wnt0dDR69eqFXbt2ab03k6YzSO7u7sjJyVGNdDe2HroQ/+qgTJSJMlEmykSZqitTXl4enJycdJrFxusM0uXLlzFmzBjMmTOHz+bVxtraWuM4o+LiYtVybSwtLWFpaVmu3dzcHObm5mptUqlUY4dQ+UbRtb3sfnVpl8lkOHHihOpMmUQi0XifCW3t2mqvzUxlla1dJpMhOjoawcHBGtc3xUyVtTPGcPLkSQQHB6vVZcqZKjtOZd/bQshUVtnaZTIZjh07pvW9bYqZKmvnOE51nF9ex5QzVVb7y+9tMzMzQWSqrL1s5pfVZiZt9WvCa5C2o6MjnJ2d+WxarZQz4MpStjVs2LCmSzI45SlaU54ZoC/KLA6UWRwoszgIITOvDtKQIUMQHR1tdA+hCwgIwKVLl8rVdfbsWdjY2KBFixa1VBkhhBBCTAmvDtKSJUtgbm6O119/Hffu3TN0TTq5f/8+EhIS1K6TjhgxAg8fPsSff/6pasvJycH27dsxcOBAjZfQCCGEEELK4v2okUuXLqF37954+vQp6tWrp3GwE8dxuH37tt77/v777/HkyRNkZmZizZo1GDZsGNq2bQsAmDZtGurWrYsJEyZg/fr1SE1NVd21Wy6Xo3v37vjnn38wZ84cODk5YfXq1UhPT8f58+fh4+Ojcw3G+qgRhUKBnJwcODk5mfQzbvRBmSmzUFFmyixUxppZn892Xh2ko0ePYuDAgSguLoa5uTlcXFy0DopSTlfWh5eXF+7cuaN1f15eXho7SADw+PFjzJkzB3/99ReKiorQsWNHLFu2DB06dNCrBmPtIBFCCCGEn2p/FtvHH38Mxhi2bt2K4uJiZGRkIDU1VeMXH2lpaaobapX9UnaG1q1bp/ZvpXr16uGXX35BTk4Onj17hmPHjundOTJmpaWl2Ldvn143
uzJ1lFkcKLM4UGZxEEJmXtP8b9y4gTfeeAMjR440dD1EB6b88D++KLM4UGZxoMziYOqZeZ1BcnZ2rvCeQoQQQgghpoxXB+n111/H/v37UVRUZOh6CCGEEGLCmFwOxdVLcE26DsXVS2Av3TXblPAapP38+XOMHj0aubm5+OKLL+Dv7w9bW9vqqK/WGOsgbeXNt+zs7Gr8WXe1hTJTZqGizJRZaGQnY/D8hxVgOVmqNs7JBRZTZsEsKKQWK3uh2mexKW/5zRir8GBzHGey1yCNuYMkk8lgZmYm+B80JcpMmYWKMlNmY8UYQ2FhoX4bxZ0Ali4AA6Ax5bwFQJfyD5SviI2NjUG/Z/p8tvMapB0UFGQyB1loZDIZoqKi0K9fP72eKWPKKDNlFirKTJmN1bNnz2BnZ6fz+hIAN7r5o6GlBSQa+gcKxnBvwXz4nr4CfZ7BkZ+fX2tXqHh1kI4dO2bgMgghhBBiLPQ9e9TNwQ6NrbQ/rULCcXC3skQ3BzucfJKvVx211UEynttbEkIIIcQo2NjY6LV+A0vdzozpuh7fOgyJ1xkkQgghhAhXnTp1UFBQoPsG1+KBT2dVulrkX7sR6Reg825rs4PEa5B2aGiobjvnOBw9elTvoowBDdI2HpSZMgsVZabMQsHkchS9PkRt9lpZnLMrrDfuAvf/E71qQ7UP0q5sDBLHcZXOcCP8FRUV6TV4TggoszhQZnGgzMLDSaWwmDILJQvnaV3HYvKHtdo50hevMUgKhULj15MnTxAdHY3AwECMGDECz58/N3S9oieTyRATE2Oyt0/ggzKLA2UWB8osXGZBIbCMWArOyUWtnXN2hWXEUqO4D5I+DDoGyd7eHj179sTBgwfh5+eHzz//HP/9738N+RKEEEIIMVJmQSGQdg3G8/gLuBx9FG1De8EioINJnTlSqpZZbHZ2dggPD0dkZGR17J4QQgghRoqTSiFp0w4PW/hC0qadSXaOgGqc5i+RSHD//v3q2r2omZmJb/IhZRYHyiwOlFkcTD0zr1lslUlJSUHnzp3h6OiIhIQEQ+++RhjrLDZCCCGE8FPts9gmTpyosV0mk+HevXs4deoUSktLsWjRIj67JxVQKBTIycmBk5MTJBJx3OeTMlNmoaLMlFmohJCZVwdp3bp1FS738fHB7Nmz8fbbb/PZPamAXC5HXFwc+vXrZ7JvOn1RZsosVJSZMguVEDLz6iClpqZqbJdIJHBwcBD0vR4IIYQQIny8Okienp6GroMQQgghxGgYdIh5Xl4ezp49CysrK3Tv3p3upF0NOI6DnZ2dqL63lFkcKLM4UGZxEEJmXrPYfv75Z2zcuBF//fUX6tWrBwC4cuUKwsPD8fDhQwBAly5dcOjQoVp90FxV0Cw2QgghRFj0+WznNXLq999/R0lJiapzBACzZ89GVlYW3nrrLfTr1w9xcXFYs2YNn92TCigUCty5cwcKhaK2S6kxlFkcKLM4UGZxEEJmXh2kpKQk+Pv7q/796NEjxMTE4O2338Yvv/yCPXv2oGPHjti0aZPBCiUvyOVyxMfHQy6X13YpNYYyiwNlFgfKLA5CyMyrg/TkyRM4Ozur/n3y5EkAwLBhw1Rt3bt3R1paWtWqI0SEmFwOxdVLcE26DsXVS2Am/AuGEEJMFa9B2vXr11d7jMjRo0chlUrRrVs3VRtjDKWlpVWvkBARkZ2MwfMfVoDlZMEPgOzg35A7ucBiyiyTexI2IYSYMl4dpDZt2uDvv//Ghx9+CCsrK2zevBndunVDnTp1VOukpaXBzc3NYIWSFziOg7Ozs0nPDNCXKWZmjKGwsFC/jeJOAEsXgAF4OSnLyULJwnkombcA6BKs8+5sbGxM6ntmise5qiizOFBm08RrFltMTAx69+6t1vbXX39h4MCBAF4MznJzc0NoaCi2bNlimEprGM1iI1Xx7Nkz2Nra6ry+BMCNbv5oaGkBiYZfKArGcK/kOXxPX4GuQx4LCgrU/mghhBCxq/ZZbCEhIdi9ezeGDh2KoUOHYuvWrarOEQCcPn0aDRs2VBuTRAxDLpcjISHBpAe+6csUM+v7d0c3Bzs0trLU2DkCAAnHwd3KEt0cdL9LfTU8h7pameJxrirKLA6U2TTxvlFk//790b9/f43LgoKCcPnyZd5FEe0UCgUSExPh7e0NqVRa2+XUCDFkbmBpbtD1TJEYjnNZlJkyC5UQMpvmE+QIMXL6Xnd/UKLbhAZd1+NTAyGEkH9V6VEj586dw/nz5/HkyRONp9E4jsNnn31WlZcgxCTZ2NigoKBA9w3kcuCdscCjbO3rODlj/53DgI5/jZnqXewJIcQY8Oog5ebmYsiQITh9+nSF4xyog2R4EokEHh4ekEjEc/LPFDNzHKf3AGnZ1NkoWThP63LLKbNhJuAJA6Z4nKuKMosDZTZNvGaxTZgwARs2bEDPnj0xfvx4NG7cGGZmmvtaPXr0qHKRtYFmsZHa8PJ9kJQ4Z1dYTP6Q7oNECCFVpM9nO68OkpOTE5o1a4a4uDjBjnMw1g6SXC7H1atX0aZNG5Md+KYvsWVmcjlKr1xCevxleAS0hbl/O3AiyC224wxQZsosXMaaudqn+RcVFSE4OFiwnSNjplAokJ6ebtIPANSX2DJzUik4vwBcr+8Kzi9AFJ0jQHzHGaDMYkGZTROvDlJAQAA9Z40QQgghgsWrgxQREYHdu3fjzJkzhq6HEEIIIaTW8ZrF9uDBA/Tv3x89evTA66+/jnbt2mm9ljdu3LgqFUjUSSQS+Pj4mPTMAH1RZnGgzOJAmcVBCJl5DdKWSCTgOE5tin/Z8UiMMXAcZ7K3GTfWQdqEEEII4Uefz3ZeZ5AiIyN5FUaqTiaT4dy5c+jUqZPWWysIDWWmzEJFmSmzUAkhM6+qx48fb+g6iI4YY8jOzja5B5FWBWUWB8osDpRZHISQ2XQvDhJCCCGEVJMqnfdKS0vDpk2bEB8fj7y8PNjb2yMgIACvv/46vLy8DFQiIYQQQkjN4jVIGwC+/fZbzJ07FzKZrNwpNHNzc3z11VeYMWOGQYqsDcY6SFuhUCAjIwPu7u4mPTtAH5SZMgsVZabMQmWsmav9USN79+7FoEGD4OTkhA8//BAhISFwc3PDgwcPEBMTgxUrVuDRo0fYvXs3+vfvzztIbTLWDhIhhBBC+Kn2R42sWLECjo6OuHTpEubPn4/OnTvD09MTgYGBmDdvHi5evIh69ephxYoVvAIQzZhcjucXz+HKqhV4fvEcmIneQkFfMpkM0dHRkMlktV1KjaHM4kCZxYEymyZeY5AuXbqE119/HY0bN9a43N3dHaNGjcLmzZurVBz518tPeW8GoPTvrZA5ucBiyixBP+WdyeWQX7kIm4tnIK9bB9KADqJ4NhljDPn5+SY9A0RflFkcKLM4CCEzrw7S8+fPUadOnQrXsbW1xfPnz3kVJWSMMRQWFuq3UdwJYOkCMAAv346T5WShZOE8lMxbAHQJ1nl3NjY2JvGg4Zc7hX4AZAf/hlwEnUJCCCG1j1cHqUWLFtizZw+++OILjTeAkslk2Lt3L1q0aFHlAoW
msLAQtra2Oq8vAXCjmz8aWlpAoqFTo2AM9xbMh+/pK9D1mckFBQWVdnANiTqFhBBCTA2vMUjjxo1DYmIiwsLCcPHiRbVlFy5cQHh4OBITE+mGkhroe7qxm4MdGltZauwcAYCE4+BuZYluDnbVVkNVPXv2DLa2tjp/2dva4u6C+VAwBk2pFYwhY8F82Ouxz2fPntVo5qqSSqXo0qULpCK4nKhEmcWBMouDEDLzOoM0Y8YMnDhxArt370anTp1gY2MDFxcXZGVlobCwEIwxDB482KSn+RuLBpbmBl2vNuh79kjZKdTm5U7hySf5Otegz5m72iaRSODi4lLbZdQoyiwOlFkchJCZ1xkkqVSKv/76C+vWrUPPnj1hYWGB9PR0WFhYICQkBOvXr8euXbuM6t4HxkLfyzwPSkoNuh6fGqrKxsZGr/Wro1Oobw21rbS0FPv27UNpqe7H1dRRZnGgzOIghMxVupP2uHHjMG7cOEPVIgo2NjYoKCjQfQO5HHhnLPAoW/s6Ts7Yf+cwoOOpzJruLNSpU0e/zNfigU9nVbpa5F+7EekXoNMuTa2DBMCkp8fyRZnFgTKLg6lnNs1H7JowjuP0HiAtmzobJQvnaV1uOWU2zIz4Zpb6ZmYdO6PIyQUsJ0v7Pp1dYd2xsyim/BNCCKl5vK6B7d27F8OGDUNmZqbG5ZmZmRg2bBj279/Pq6iSkhJ8/PHHaNiwIaytrREYGIjDhw9Xut2CBQvAcVy5LysrK151GAuzoBBYRiwF56R+PZdzdoVlxFLBTXnnpFJYTKn4DJLF5A+pc0QIIaTa8HrUSHh4ODIzM3HlyhWt67Rt2xaNGjXC3r179S7qtddew44dOzBz5kw0b94c69atw/nz5xETE4Pu3btr3W7BggVYuHAh1qxZozYgVyqV4rXXXtOrBmN81AiTyyG/dhlF9+7CulFjSP3aCrqT8PJ9kJQ4Z1dYTP5QcJ3CspQ3WbOzsxPN7QkoM2UWKspsPJn1+WzndYntypUrGDBgQIXrBAYG8uocnTt3Dn/88Qe+/vprfPTRRwBejHVq3bo15s6di9jY2Er3MWLECDg5Oen92saOk0oh9W8PK19/SM3MjOpNVx3MgkIg7RoM+bXLkGVnwczZRfCdwpdZW1vXdgk1jjKLA2UWB1PPzOsSW25ubqXT95ycnJCTk6P3vnfs2AGpVIp3331X1WZlZYVJkyYhLi4OGRkZle6DMYa8vDyTvsW5NjKZDFFRUSY/+E1XnFQK5uuPg4WlYL7+oukcie04A5RZLCizOAghM68zSM7OzkhMTKxwncTERDg6Ouq978uXL6NFixblTn116tQJABAfHw93d/cK99G0aVPV3aKHDBmC5cuXw9XVtcJtSkpKUFJSovp3Xl4egBdTFZXTFCUSCaRSKeRyORSKf+9brWyXyWRqnTKpVAqJRKK1vez0R+Vdycu+oV5uV25TWloKc3NzKBQKyF96aC3HcTAzM9Parq322sz0Mk2ZlOsoFAq1/ZtyJl2OEwCjeu8ZIlNFx0lZk1wuh7m5uSAylW0vW7tynbK1mHImXY+T8r9CyqStduVyxhgYY4LIVFm7kqbf2bWZSZ/bDvDqIAUHB2Pnzp24evUq2rRpU275lStXsHv3bgwbNkzvfd+/fx9ubm7l2pVt2gaGA0C9evUwdepUdOnSBZaWljh58iR++OEHnDt3DhcuXKjweuOSJUuwcOHCcu2HDh1STRH38PBA27ZtcfXqVaSnp6vW8fHxQcuWLXHu3DlkZ/87HT8gIACenp44ceIE8vP/vaFhly5d4OLigkOHDqm9qUJCQmBtbY2oqCi1Gvr164eioiLExMSo2mJiYtC/f3/k5OQgLi5O1W5nZ4fQ0FBkZGQgPj5e1e7s7IyuXbsiOTlZrXNrTJnMzMw0ZlKOJ7t37x6uXbsmiEyVHafbt28DgGpyghAy6Xqcbt++DV9fX0Fl0nac/Pz8AACxsbFqt8Iw5UyVHSdljcr3thAy6XqcZDIZiouLBZVJ23Hq27cvAKhNsDKGTMqTH7rgNUj76tWr6NSpEywsLPDRRx+hT58+aNSoEe7du4dDhw5h+fLlKC0txdmzZzV2oCri7e0NHx+fcgchJSUF3t7eWLlyJWbOnKnz/jZv3ozXX38dS5Yswbx52qfKazqD5O7ujpycHFXHyhh66KWlpTh8+DD69OkDGxsbwf3Voe0M0qFDh/Dqq6+q3XzUlDNVdpyKi4tx8OBB9OnTB+bm5oLIpMsZpMOHDyMsLAxWVlaCyFS2XdMZpAMHDqBv375qz7U05UyVHafCwkLV7zBzc3NBZNLlDNLhw4cRHh6uOjtq6pkqaweAqKgo1XE2lkx5eXlwcnLSaZA2rw4SAOzcuRPjx49HUVGRWjtjDLa2ttiwYQOGDBmi935bt24NV1dXHD16VK39xo0b8PX1xY8//oj33ntPr326ubnB19cXR44c0XkbY5zFBkB1etZMBIO0lSgzZRYqykyZhcpYM1f7LDYAGD58OIKCglRT8J8+fQoHBwd06tQJ48ePh7OzM6/9urm54d69e+Xa79+/DwBo2LCh3vt0d3dHbm4ur3qMUVFREezsdH84rRBQZnGgzOJAmcXB1DNX6WFpLi4umDt3LrZv345Dhw5h27Zt+Oijj3h3joAX1xCTkpLKXSc8e/asark+GGNIS0urUk3GRCaTISYmxqRnBuiLMosDZRYHyiwOQshsdE+THTFiBORyOdauXatqKykpQWRkJAIDA1Uz2NLT05GQkKC27cuDtZTWrFmD7OxsvPrqq9VbOCGEEEIEw+iexRYYGIiRI0di/vz5yMrKQrNmzbB+/XqkpaXh119/Va03btw4HD9+XG0glqenJ0aPHg0/Pz9YWVnh1KlT+OOPPxAQEKD3uCVCCCGEiJfRdZAAYMOGDfjss8/w+++/4/Hjx2jTpg327t2L4ODgCrd7/fXXERsbi507d6K4uBienp6YO3cu/vOf/5jk09y1eXm2i1hQZnGgzOJAmcXB1DPznsUmdMY6i40QIWJyORTX4sFyc8A5OkHiFyCau6YTQmpOjcxiI7VDoVAgJycHTk5OavcEEjLKLOzMGh9K7OQCiymzBP9QYjEdZyXKTJlNhWlWLWJyuRxxcXFqN9kSOspsGhhjePbsmX5fR/ajZOE8KF7qHAEAy8lCycJ5eHZkv177M7UT4qZ4nKuKMouDEDLzOoOUmZnJ635EhBDhevbsmV73PJEAuNHNHw0tLSDRcCM5BWO4t2A+fE9fgaL85hrl5+erHk1DCCFVwesMkpeXFwYPHoy9e/eq3f6bECJehYWFeq3fzcEOja0sNXaOAEDCcXC3skQ3B907XfrWQAgh2vDqIHXu3Bl79uzB4MGD4eHhgf/+979IS0szcGlEE47jYGdnZ1S3bq9ulNk06DtTtIGluUHX41NDbTPF41xVlFkchJCZ9yy2pKQk/Pzzz/j999+RlZUFiUSC3r1745133sHgwYNNfnofzWIjRD+MMf3O4FyLBz6dVfl6i1cAfgE67dLGxsakfyETQqqXPp/tVZ
7mL5PJ8Pfff+OXX37B4cOHwRiDk5MTJkyYgEmTJqFFixZV2X2tMdYOkkKhQEZGBtzd3U12ZoC+KLMwMzO5HEWvD1GbvVYW5+wK6427BDvlXwzHuSzKTJlrkz6f7VWu2szMDMOHD8f+/fuRlpaGiIgISCQSLFu2DK1atUJISAi2bdtmcrNLjJVcLkd8fLxJzwzQF2UWJk4qhcWUis8gWUz+ULCdI0Acx7ksyiwOQshssG6dQqHAxYsXcf78eWRnZ4MxBnd3d5w+fRqvvfYa/P39kZycbKiXI4QIgFlQCCwjloJzclFr55xdYRmxVPD3QSKEGK8qDxRKSUnBL7/8gvXr1+PBgweqM0rvv/8+QkJC8ODBA6xcuRIrV67EBx98gCNHjhiibkKIQJgFhUDaNRjP4y/gcvRRtA3tBYuADoI+c0QIMX68OkilpaXYuXMnfv75Zxw/fhwKhQJNmjTBF198gbfeegsuLv/+NdigQQN8+eWXyMvLw4YNGwxWuFhxHAdnZ2dRDUSlzMLHSaWQ+reHokQOqX970XSOxHacAcosFkLIzGuQtrOzM3JzcyGVSjFw4EC899576Nu3b4XbLF26FJ988onJ3DfJWAdpE0IIIYSfah+kbWNjg4ULF+LOnTvYuXNnpZ0jAJg8eTJSU1P5vBx5iVwuR0JCgkkPfNMXZRYHyiwOlFkchJCZVwcpLS0Nn376Kdzc3HText7eHp6ennxejrxEoVAgMTHRZM7EGQJlFgfKLHxMLocs/gKe7PkTsvgLYCb84akPsR1nQBiZeY1BMuVrioQQQmqe7GQMnv+wAiwnC34AZAf/htzJBRZTZtFsRWKUeHWQJk6cWOk6EokE9vb28PHxwYABA9CoUSM+L0UIIcTEyU7GoGThvHLtLCfrRTvd0oEYIV4dpHXr1qnOImka481xnFr7tGnT8N///heffvopzzKJkkQigYeHh1HdmbS6UWZxoMymQaFQICcnR48N5LBe9TU4AJquPTAARd8vQ1HzloBEt9mLTk5OJvU9M8XjXFVCyMxrFltqaipmzpyJc+fOYcaMGejWrRtcXV3x8OFDnD59Gt999x06deqE//znP7hy5QoWL16MjIwMbN68GaNHj66OHAZHs9gIIaS8rKwsuLq66rx+kIMd9rdvVel64Rdv4uSTfJ32+fDhQ7XbyRCiq2qfxbZ161acPXsW8fHxmDdvHoKCgtCiRQsEBQVh3rx5uHTpEs6cOYOYmBi8/fbbOH36NGxtbbF69Wpegci/5HI5Ll++bNIzA/RFmcWBMgtTA0tzg65nisRwnMsSQmZeHaRff/0Vo0aN0vpXRIMGDTBy5Ej8/PPPAIBGjRphwIABuHLlCv9KCYAXp7fT09NNemaAviizOFBm0+Dk5ISHDx/q/LX6j2067Xf1H9t03qeTk1M1pzQsUzzOVSWEzLzGIN29exeWlpYVrmNlZYW7d++q/u3h4YHi4mI+L0cIIcRISCQSvS5vsfo9UeTkApaTpXUdztkV9YN6iuYO6sQ08DqD1KhRI/z1119aOzzFxcX466+/1GauZWVloV69evyqJIQQYpI4qRQWU2ZVuI7F5A+pc0SMDq8O0qRJk3D79m10794du3fvxqNHjwAAjx49wu7du9G9e3ekpKSo3Q7g5MmT8Pf3N0zVIiaRSODj42PSMwP0RZnFgTILl1lQCCwjloJzUj/zxDm7wlIEU/zFcpxfJoTMvGaxyeVyvPXWW9i4caNqur9EIlFda2SMYezYsdiwYQMkEgkePnyIpUuX4tVXX0VYWJhhE1QTmsVGCCGGxeRyKK7Fg+XmgHN0gsQvgM4ckRpV7bPYpFIpNmzYgCNHjmDcuHEICAiAl5cXAgICMH78eBw+fBgbN25U9RxdXV2xcuVKk+kcGTOZTIbY2FjIZLLaLqXGUGZxoMzCx0mlYK39cc7KDqy1v2g6R2I7zoAwMvMapH3ixAnY29sjNDQUoaGhhq6JVIAxhuzsbI036BQqyiwOlFkcKLM4CCEzrzNIISEhWLt2raFrIYQQQggxCrw6SC4uLrCysjJ0LYQQQgghRoFXB6lPnz44duyYSZ86M1VSqRQBAQGQiuTaPUCZxYIyiwNlFgchZOY1iy0zMxNdunRB37598eWXX8LR0bE6aqtVNIuNEEIIEZZqn8X2xhtvwMHBAb/99hsaNWqEV155BSEhIapB28qvXr168QpAtJPJZIiOjjbpmQH6osziQJnFgTKLgxAy85rFduzYMdX/l5SUICEhAQkJCeXWU94jiRgOYwz5+fmiurxJmcWBMosDZRYHIWTm1UEy5YfPEUIIIYRUxnTvAU4IIYQQUk14DdJ+WUFBAZKSkvDs2TMEBQUZqq5aZ6yDtBUKBXJycuDk5GTSz7jRB2WmzEJFmSmzUBlr5mofpA0AaWlpGDx4MOrVq4eOHTsiJOTfhw2ePn0ar7zyitpYJWIYEokELi4uRvWGq26UWRwoszhQZnEQQmZelaenp6Nz586IiorC4MGD0aVLF7WBWIGBgcjJycGWLVsMVih5obS0FPv27UNpaWltl1JjKLM4UGZxoMziIITMvDpIERERePz4MY4fP44dO3agT58+asvNzMwQFBSE06dPG6RIos6Up03yRZnFgTKLA2UWB1PPzKuDdPDgQQwdOhRdu3bVuo6npyfu3bvHuzBCCCGEkNrCq4OUm5sLLy+vCtdhjKGkpITP7gkhhBBCahWvDpKrqyuSk5MrXOfatWvw8PDgVRTRzszMDCEhITAz43ULK5NEmcWBMosDZRYHIWTm/bDavXv34urVqxqXnzx5EtHR0ejXr1+ViiOaWVtb13YJNY4yiwNlFgfKLA6mnplXB+nTTz+FtbU1goOD8fnnn+PWrVsAgP379+Ozzz7Dq6++CicnJ8yZM8egxZIXg96ioqJMfvCbPiizOFBmcaDM4iCEzLzOfXl5eeHgwYMYM2YMPvvsM3AcB8YYBgwYAMYYPDw8sGPHDri5uRm6XkIIIYSQasf74mBgYCCSk5OxZ88enD17Frm5ubC3t0dgYCAGDx4MCwsLQ9ZJCCGEEFJjqjR6yszMDEOHDsXQoUMNVQ8hhBBCSK2r8rPYhMpYn8XGGINMJoOZmRk4jqvtcmoEZabMQkWZKbNQGWtmfT7beZ9Bev78Of766y+cP38eT548gVwuL7cOx3H49ddf+b4E0aKoqAh2dna1XUaNosziQJnFgTKLg6ln5tVBunPnDvr06YPbt2+johNQ1EEyPJlMhpiYGPTr1w/m5ua1XU6NoMyUWagoM2UWKiFk5tVB+vDDD3Hr1i28+eabmDhxIho3bmzSN4MihBBCCHkZr15NdHQ0evXqhfXr1xu6HkIIIYSQWsfrRpEKhQJt27Y1dC1ER2I8W0eZxYEyiwNlFgdTz8xrFlufPn1gZWWFPXv2VEdNRsFYZ7ERQgghhB99Ptt5nUFaunQpoqOjsWPHDl4FEv4UCgWysrKgUChqu5QaQ5nFgTKLA2UWByFk5tVB2rdvH0JCQjB69GiEhobio48+wqJFi8p9/e9//+NVV
ElJCT7++GM0bNgQ1tbWCAwMxOHDh3Xa9t69exg1ahQcHBxgb2+PwYMHIyUlhVcdxkgulyMuLk7jbRWEijKLA2UWB7FlZnI5Si+fR8r6X1B6+TyYSHIL4TjzukC4YMEC1f8fO3YMx44d07gex3H47LPP9N7/hAkTsGPHDsycORPNmzfHunXr0K9fP8TExKB79+5atysoKEBISAiePn2KTz75BObm5li5ciV69OiB+Ph41K9fX+9aCCGEED5kJ2Pw/IcVYDlZ8AMgO/g35E4usJgyC2ZBIbVdHqkErw5STEyMoetQOXfuHP744w98/fXX+OijjwAA48aNQ+vWrTF37lzExsZq3Xb16tVITk7GuXPn0LFjRwBAeHg4WrdujeXLl+OLL76otroJIYQQJdnJGJQsnFeuneVkvWiPWEqdJCPHq4PUo0cPQ9ehsmPHDkilUrz77ruqNisrK0yaNAmffPIJMjIy4O7urnXbjh07qjpHANCyZUv06tUL27ZtE0QHieM42NnZGdWt26sbZRYHyiwOpphZoVAgJydHjw3ksF71NTgAmlIyAEXfL0NR85aARKrTLp2cnCCR8BoVUytM8TiXZXRz8C5fvowWLVqUG13eqVMnAEB8fLzGDpJCocDVq1cxceLEcss6deqEQ4cOIT8/X+ttz0tKSlBSUqL6d15eHgCgtLQUpaWlAACJRAKpVAq5XK428EzZLpPJ1O4sLpVKIZFItLYr96uknBIpk8kqbA8KClLtT6FQqF3j5TgOZmZmWtu11V7bmZTMzc011h4aGgqFQqG2f1PPVNFx4jhOdZxLS0sFkUmX4xQUFKT6hSqUTC+3a6o9NDQUMplMrX5Tz1TRcWKMqb23TSHTo0eP4ObmBl0FOdhhf/tWWpdzALhHORjRygcnn+TrtM+7d+/CxcXFpH7v9ezZE3K5XLWv2n7vvfx5rgveHSSZTIZVq1Zhy5YtSEhIQGFhoeobFB8fj7Vr12LmzJlo0aKFXvu9f/++xjeisi0zM1Pjdrm5uSgpKal0Wx8fH43bL1myBAsXLizXfujQIdjY2AAAPDw80LZtW1y9ehXp6emqdXx8fNCyZUucO3cO2dnZqvaAgAB4enrixIkTyM//94egS5cucHFxwaFDh9TeVCEhIbC2tkZUVJRaDf369UNRUZHapU0zMzP0798fOTk5iIuLU7Xb2dkhNDQUGRkZiI+PV7U7Ozuja9euSE5ORmJioqrdFDLZ2tqiWbNmqk6wEDJVdpySkpKQlJQkqEy6HqcWLVqgVatWgsqk7Ti1adMGEokEt27dQkFBgSAyCfE4NW7cGPpoYKnbozV0XQ8Ajhw5AgcHB5M5TuHh4bh+/braJCljeO8pT37ogtd9kIqKitC3b1/ExsbCyckJ5ubmuH//vqpH+PTpUzRo0ACzZ8/G4sWL9dq3t7c3fHx8yh2ElJQUeHt7Y+XKlZg5c2a57TIyMuDh4YEvv/wSc+fOVVv222+/YdKkSbh8+TICAgI0vq6mM0ju7u7IyclRnc0yhh56aWkpDh8+jD59+sDGxsbo/jqsjr94ZTIZDh06hFdffVXtFLMpZ6rsOBUXF+PgwYPo06cPzM3NBZGpsuOkfG+HhYXByspKEJnKtpetXaFQ4MCBA+jbt6/aTfVMOVNlx6mwsFD1O8zc3NwkMnEch9zcXJ3fe5ZJN2H9+X9QmYJ5iyBr1VqnTMpLbKbyew8AoqKiVMdZU6ayWWsiU15eHpycnHS6DxKvM0hffPEFTp8+jaVLl2LOnDlYuHCh2pT+unXrokePHjh48KDeHSRra2u1jopScXGxarm27QDw2hYALC0tYWlpWa7d3Ny83IP2pFIppNLy14213TVUW7u2B/jp0q78f4lEovG6tLZ2bbUbQyalijJpWt/UM2k7Tsp9vbw/U8+ky3FS/r+QMimVrV35IWRmZqaxHlPMVFm7ssay721jz+Ti4qKxPk1YgwYo+skFLCdL6zqcsytcQnqD0/D6ujD233vK97amz9DafO/p8+BcXiO+tm7dipCQEMydOxccx2kchNW0aVO1U2K6cnNzw/3798u1K9saNmyocTtHR0dYWlry2pYQQggxFE4qhcWUWRWuYzH5Q96dI1IzeHWQ0tPT0aFDhwrXsbOzw9OnT/Xed0BAAJKSkspdJzx79qxquSYSiQR+fn64cOFCuWVnz55F06ZNtQ7QNiUcx8HZ2dmkZwboizKLA2UWB7FkNgsKgWXEUnBO6meeOGdXWIpgir8QjjOvDpKdnR2ysrSfOgSA27dvw9nZWe99jxgxAnK5HGvXrlW1lZSUIDIyEoGBgaoZbOnp6UhISCi37fnz59U6SYmJiYiOjsbIkSP1rsUYmZmZoWvXrib/EEB9UGZxoMziIKbMZkEhsN70F6yWrYblJ4tgtWw1rDfuEnznCBDGcebVQercuTP27NmDJ0+eaFyekZGBqKgoBAcH673vwMBAjBw5EvPnz8fcuXOxdu1ahIaGIi0tDV999ZVqvXHjxqFVK/VplJMnT4a3tzf69++Pr7/+Gt988w369OkDV1dXzJ49W+9ajJFcLkdCQoJJ375dX5RZHCizOIgtMyeVAn4BuNXQE/ALEM1lNSEcZ14dpDlz5uDx48fo1asXTp8+rRq9XlhYiKNHjyIsLAwymQyzZlV8DVabDRs2YObMmfj9998xffp0lJaWYu/evZV2uOzs7HDs2DEEBwdj8eLF+Oyzz+Dv74/jx4/zOptljBQKBRITE036AYD6osziQJnFgTKLgxAy8zr3FRwcjO+//x4zZsxQ67Qox/hIpVKsXr0a7du351WUlZUVvv76a3z99dda19H2/LfGjRtj+/btvF6XEEIIIQSowo0iP/jgA/Ts2RM//vgjzp49i9zcXNjb2yMwMBCTJ0+Gr6+vIeskhBBCCKkxVRo91apVK3z77beGqoXoQCKRwMPDw6SeyVNVlFkcKLM4UGZxEEJmXnfSFoO8vDzUrVtXp7ttEkIIIcT46fPZbrpdO5GSy+W4fPmySc8M0BdlFgfKLA6UWRyEkJk6SCZGoVAgPT3dpGcG6IsyiwNlFgfKLA5CyEwdJEIIIYSQMkz3FpfVTDk0q+wjT2pbaWkpCgsLkZeXp9dD90wZZabMQkWZKbNQGWtm5We6LsOvqYOkRX5+PgCoHm1CCCGEEGHIz89H3bp1K1yHZrFpoVAokJmZCTs7O6N62F5eXh7c3d2RkZEhmtl1lJkyCxVlpsxCZayZGWPIz89Hw4YNK70FAZ1B0kIikaBx48a1XYZW9vb2RvWmqwmUWRwoszhQZnEwxsyVnTlSokHahBBCCCFlUAeJEEIIIaQM6iCZGEtLS0RERMDS0rK2S6kxlFkcKLM4UGZxEEJmGqRNCCGEEFIGnUEihBBCCCmDOkiEEEIIIWVQB4kQQgghpAzqIBFCCCGElEEdJEIIIYQHLy8vTJgwobbLINWEOkhG6vbt23jvvffQtGlTWFlZwd7eHt26dcO3336Ljz/+GBzHVfrVs2fP2o6hF7FlrihvUVGRaj25XI7IyEj07NkTjo6O
[Jupyter notebook output cells elided: two "display_data" outputs whose "image/png" fields contain base64-encoded PNG figures (generated with Matplotlib 3.8.2); no caption or axis text is recoverable from the encoded data.]
K7Vq1cLw4cMRFRWF77//XuO1BV4M9opfD8uQ71VpOnfuDF9fXxw4cABHjx5V27ZgwQI8fvwYr7/+uuoyE126dEG9/2vnfkLZ/+M4gD8nsxnTGhdLlpY4MEUTyVZLyUf+1P7kIi6KwzioJTc3lJMaOSAuDhyEk/JnF0vjsAMtRaJ24CIl2eH1O/yy7PMdX37q+/uW56N2e+/zeb3a5dk+n9errAzb29tpv62IYHx8/Euhra+vD8C/u6/ePpJ6eHjAxMRE2pnvMpvNuL+/x/Pz87fr+JN108/CR2xEb1RVVSEUCmFoaAgVFRVQFAU2mw2Pj4+4vLzE4eEh+vv7MT8/DwDo7u5GQUEBGhoaYLVakUwmsbu7i7OzM3i93lQwyc/Ph8PhQDgcRm9vL8rLy5GVlYXe3t5vvRj9WYODg1haWoLH44Hf70dhYSEikQhOT0/R3t6OnZ2dtPOVlZWwWCxYW1uDTqdDSUkJNBoNAoHAuxu/bTYbpqamMDo6CrvdDr/fj7y8PGxtbSEej6Orq+uXx3t/o1AohHg8jmAwiNXVVTQ2NsJkMuHm5gbRaBQXFxdIJBKpF+fdbjfW19fh8XjQ1tYGvV6PmpoadHR0fPneWVlZWF5eRmtrKxRFgc/ng9VqxdHREQ4ODmCz2TA5OZl2fmFhAYqioKWlJbUHaW9vD4lEAna7HbFY7FP3djqdCAQCmJ2dRVVVFTweD0QEGxsbuL29xfDwMJxO55d7ysTtdiMajaKtrQ3Nzc3IycmB0+lMfb5Sx5+sm36Y/2l6juivdnx8LD09PWKxWESr1UpRUZHU1tbK2NiYnJ+fp86FQiHp7OwUq9Uqer1eCgsLpb6+Xubm5uTl5SXtmvF4XBRFEZPJJBqNJm1M/qMxf5fL9Ut9H40qvzeiv7+/L01NTWI0GsVkMomiKHJycpJxZF9EJBKJiMvlEqPRmNp3c3V1JSKZx/xfbW5upr6n0+mkurpaZmZmJJlMfrqHj3rP5HXM/7W+tz4aw39v3Pzp6Ummp6elrq5O8vLyJDc3V8rKyqS7u1tWVlbSekkmkxIMBqW0tFSys7PTevqvPcZiMfF6vVJUVCRarVasVquMjIzI3d1dxuuEw2FxOp2Sm5srZrNZfD6fXF9ff9j7exYXF8XhcIjBYBCDwSAOhyPjyojfrYL4yOPjowwMDEhxcXFqFYH6Op+t46vnOeZPn6URyfAfMhEREdEPxneQiIiIiFQYkIiIiIhUGJCIiIiIVBiQiIiIiFQYkIiIiIhUGJCIiIiIVBiQiIiIiFQYkIiIiIhUGJCIiIiIVBiQiIiIiFQYkIiIiIhUGJCIiIiIVP4BvgFKl4QFbGsAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "exp='cifar10'\n", + "file_name = 'res-sophIA-cifar10.json'\n", + "name = 'cifar10'\n", + "plot_all_error_bar(folder, file_name, name, exp, meas_calc_list, to_save=do_save)\n", + "# plot_evolution(folder, file_name, name, exp, meas_calc_list, to_save=do_save)\n", + "\n", + "new_file_name = 'res-sophIA-cifar10_dyn-EPM.json'\n", + "total_to_dynamic_EPM(exp,folder, file_name, new_file_name, meas_calc_list, idle_power)\n", + "plot_all_error_bar(folder, new_file_name, name+'_dyn-EPM', exp, meas_calc_list_2, to_save=do_save)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "e764495e", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkgAAAHPCAYAAACoQyVSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAACcmUlEQVR4nOzdeVwU9f8H8NfsIYeAiIBCcgkKKiqmiSeXIoLmkWdaalrWV80jTdMytCzt65Vdln0Vb00xywxv8ATFUtQ0kQSEPCGQQ2Bld+f3h7/dXHYXdocFdmfez8eDh/qZY98vZmU/zHw+MwzLsiwIIYQQQoiaqKELIIQQQggxN9RBIoQQQgipgjpIhBBCCCFVUAeJEEIIIaQK6iARQgghhFRBHSRCCCGEkCqog0QIIYQQUgV1kAghhBBCqqAOEiGEEEJIFdRBIqQBMAxj9FdYWFid1LJ48WIwDIPFixebZH/Z2dlgGAbe3t4m2Z8QTJw4UX2cg4KCql33woULGu+LM2fOaCxXHU+GYTBo0CC9+9m2bZve95Vq+xMnTmgtk8lk+OKLLxASEgInJydIpVI4Ozujbdu2GDVqFNauXYu8vDytWoz50vW6hNQ3SUMXQIgQTZgwQavt/v37OHz4sN7lAQEBdV4XaXiXL1/G77//ji5duuhcvmHDBoP39euvv+LUqVMICQkxSW0PHjxAZGQkrl69CrFYjG7dusHDwwNKpRI3b97E3r17sWfPHvj6+mLQoEEICgrS+V4+dOgQHjx4gE6dOunsELZo0cIk9RJSG9RBIqQBbNq0SavtxIkT6g6SruV1Zfr06RgzZgycnZ1Nsr/nnnsOf/75J6RSqUn2JyRdu3bFb7/9ho0bN+rsIJWXl2PXrl1wc3ODWCzG33//rXdftra2KCsrw/z585GSkmKS+qZPn46rV6+iffv2+PXXX+Hl5aWx/OHDh9i5cyeaN28OABg6dCiGDh2qtZ+wsDA8ePAAQ4cONdmZS0JMjS6xESJwzs7OCAgIMFkHSSqVIiAgAL6+vibZn5AMHDgQzZs3x86dO1FRUaG1PD4+HkVFRRg/fjzEYnG1+xo2bBg8PDxw7tw57Nu3r9a1VVRU4OeffwYArF69WqtzBACurq6YOXMmXnjhhVq/HiENjTpIhFiAZ8cJ5eTkYPLkyfDw8IBUKsXEiRPV6/344494/fXXERgYiKZNm8La2ho+Pj6YNGkS0tPTa9z3szZt2gSGYTBx4kQ8fvwYCxYsgJ+fH6ysrNCiRQtMmDABd+7c0dpfdWOQVGNMAGDv3r3o3bs3HBwc0LhxY/Tq1QsJCQl6vwe3b9/GxIkT0aJFC1hbW6N169aIjY1FRUUFwsLCOI1dkcvl+Pbbb9GzZ080adJEvd8ZM2bozFbbDDWRSCR49dVXUVhYqLNTs3HjRgDApEmTatyXtbU1PvroIwDAwoULoVAoONcFAAUFBaisrATwtCNECN9RB4kQC5KRkYHOnTsjISEBwcHBGDx4sMaZn1GjRmHnzp2wsbFBREQEoqKiIBKJEBcXhy5duiA5Odno1ywqKkLPnj3x7bffol27doiOjgbLstiyZQt69eqFoqIio/cZGxuLkSNHAgBiYmLQunVrJCcnY9CgQTo7BtevX0fXrl2xefNmiMViDBkyBP7+/li1ahUiIyPVH9zGkMlkiI6Oxn/+8x9cunQJvXr1wtChQyGTyfDll18iKCgIFy9eNFkGQ6k6P6rOkMqtW7dw8uRJ9OrVC23atDFoX+PHj0dgYCBu3LihtT9jOTs7w9bWFgDw5ZdfQqlU1mp/hJg9lhBiFpKSklgArK7/lrGxseplr7zyCltRUaFzH7t27WJLS0s12pRKJfv111+zANj27duzSqVS575jY2M12uPi4tSvGRUVxRYVFamXFRQUsEFBQSwA9tNPP9XYLisriwXAenl5adWn2p+joyN77tw5nXW0adNGa7vnn3+eBcCOGTNGI/vff//N+vv7q/eblJSk8/uiy/z581kArK+vL5uVlaVuf/LkCTt58mQWAOvj48PKZDKTZKjOhAkTWADsxx9/zLIsy/bo0YMViUTs7du31eu8//77LAB248aNLMuyrJeXFwuAPX36tM4aJk+ezLIsy+7fv58FwD733HNsWVmZer2tW7eyANjQ0FCtevR9P2fOnKle5u3tzb799tvs1q1b2WvXrmm9r6oTGhqq8z1HiDmhM0iEWBAnJyd89dVXsLKy0rl89OjRaNy4sUYbwzCYOnUqevTogWvXruHPP/806jUbN26MuLg4ODg4qNuaNm2K9957DwBw7NgxI1MAH330EYKDgzXaFixYgCZNmuDmzZvIzc1Vt58+fRoXL16EnZ0dvv76a43szz33HFatWmX061dUVODrr78GAKxZs0bjcqBUKsUXX3yB5s2bIysrC/Hx8bXOYKxJkyZBqVQiLi4OAKBUKrF582bY2dlh1KhRRu3rxRdfRJ8+fXDnzh2sXbuWc00AsGLFCsyaNQtSqRTZ2dn48ssv8eqrr6J9+/ZwdXXF9OnT9V6aJMTSUAeJEAvSr18/NGnSpNp1/vrrL3z11VeYNWsWJk+ejIkTJ2LixIl48OABAOgdi6RP165d4ebmptXetm1bAOD0gfjiiy9qtVlZWaFVq1Za+zx58iQAYMCAAXByctLabuDAgXB0dDTq9X/77TeUlpbCyclJZy22trYYM2YMACApKanWGYyl6uhu2rQJLMvi8OHD+PvvvzFq1CitDrAhPvvsM/WfBQUFnOuSSqVYs2YNcnJysG7dOowdOxYBA
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkgAAAHPCAYAAACoQyVSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAACgCklEQVR4nOzdeVxUVf8H8M+dGWRRUBFQSDZBwdwwTdxY3cFcci1zKdR6ylzSNC1TytJyyzbLHsV9xSwzXFBxR7EUNU0lASEXBFkVGJmZ+/vD39yHYQaYGQZmzvX7fr14Kecu8/0ww8zh3nPu5Xie50EIIYQQQgQScxdACCGEEGJpqINECCGEEFIBdZAIIYQQQiqgDhIhhBBCSAXUQSKEEEIIqYA6SIQQQgghFVAHiRBCCCGkAuogEUIIIYRUQB0kQgghhJAKqINEiBlwHGfwV2hoaK3UsnDhQnAch4ULF5pkf+np6eA4Dl5eXibZ37NgwoQJwvMcEBBQ5brnz5/XeF2cOnVKY7n6+eQ4DgMHDqx0P5s3b670daXe/tixY1rL5HI5vv76awQHB8PR0RFWVlZwcnJC69atMXLkSKxatQrZ2dlatRjypetxCalrMnMXQMizaPz48Vpt9+/fx8GDBytd7u/vX+t1EfO7dOkS/vzzT3Tq1Enn8rVr1+q9r99//x0nTpxAcHCwSWrLyspCnz59cOXKFUilUnTp0gXu7u5QqVS4efMmdu/ejV27dsHHxwcDBw5EQECAztfygQMHkJWVhQ4dOujsEDZr1swk9RJSE9RBIsQM1q9fr9V27NgxoYOka3ltmTJlCkaPHg0nJyeT7O+5557D33//DSsrK5Ps71nSuXNn/PHHH1i3bp3ODlJJSQm2b98OV1dXSKVS/Pvvv5Xuy87ODsXFxZgzZw4SExNNUt+UKVNw5coVtGnTBr///js8PT01lj948ADbtm1D06ZNAQBDhgzBkCFDtPYTGhqKrKwsDBkyxGRHLgkxNTrFRsgzzsnJCf7+/ibrIFlZWcHf3x8+Pj4m2d+zJDIyEk2bNsW2bdtQWlqqtTw2NhYFBQUYN24cpFJplfsaOnQo3N3dcfbsWezZs6fGtZWWluLXX38FAKxYsUKrcwQALi4umDZtGl588cUaPx4h5kYdJEIYUH6cUEZGBqKiouDu7g4rKytMmDBBWO/nn3/GxIkT0bZtWzRu3Bg2Njbw9vbGG2+8gRs3blS77/LWr18PjuMwYcIEPH78GHPnzoWvry+sra3RrFkzjB8/Hnfu3NHaX1VjkNRjTABg9+7d6NmzJxwcHFC/fn306NEDcXFxlf4Mbt++jQkTJqBZs2awsbFBy5YtsWDBApSWliI0NNSosSsKhQI//PADunfvjoYNGwr7nTp1qs5sNc1QHZlMhrFjxyIvL09np2bdunUAgDfeeKPafdnY2OCTTz4BAMybNw9KpdLougAgNzcXZWVlAJ52hAgRO+ogEcKQlJQUdOzYEXFxcQgMDMSgQYM0jvyMHDkS27Ztg62tLcLDw9GvXz9IJBLExMSgU6dOOHPmjMGPWVBQgO7du+OHH37A888/jwEDBoDneWzcuBE9evRAQUGBwftcsGABRowYAQCIiIhAy5YtcebMGQwcOFBnx+DatWvo3LkzNmzYAKlUisGDB8PPzw/Lly9Hnz59hA9uQ8jlcgwYMAD/+c9/cPHiRfTo0QNDhgyBXC7HN998g4CAAFy4cMFkGfSl7vyoO0Nqt27dwvHjx9GjRw+0atVKr32NGzcObdu2xfXr17X2ZygnJyfY2dkBAL755huoVKoa7Y8Qi8cTQixCQkICD4DX9Wu5YMECYdlrr73Gl5aW6tzH9u3b+UePHmm0qVQq/rvvvuMB8G3atOFVKpXOfS9YsECjPSYmRnjMfv368QUFBcKy3NxcPiAggAfAf/755xrbpaWl8QB4T09PrfrU+2vUqBF/9uxZnXW0atVKa7sXXniBB8CPHj1aI/u///7L+/n5CftNSEjQ+XPRZc6cOTwA3sfHh09LSxPanzx5wkdFRfEAeG9vb14ul5skQ1XGjx/PA+A//fRTnud5vlu3brxEIuFv374trPPhhx/yAPh169bxPM/znp6ePAD+5MmTOmuIiorieZ7n9+7dywPgn3vuOb64uFhYb9OmTTwAPiQkRKueyn6e06ZNE5Z5eXnx7777Lr9p0yb+6tWrWq+rqoSEhOh8zRFiSegIEiEMcXR0xLfffgtra2udy0eNGoX69etrtHEch7fffhvdunXD1atX8ffffxv0mPXr10dMTAwcHByEtsaNG+ODDz4AABw+fNjAFMAnn3yCwMBAjba5c+eiYcOGuHnzJjIzM4X2kydP4sKFC2jQoAG+++47jezPPfccli9fbvDjl5aW4rvvvgMArFy5UuN0oJWVFb7++ms0bdoUaWlpiI2NrXEGQ73xxhtQqVSIiYkBAKhUKmzYsAENGjTAyJEjDdrXSy+9hKCgINy5cwerVq0yuiYAWLp0KaZPnw4rKyukp6fjm2++wdixY9GmTRu4uLhgypQplZ6aJIQ11EEihCG9e/dGw4YNq1znn3/+wbfffovp06cjKioKEyZMwIQJE5CVlQUAlY5Fqkznzp3h6uqq1d66dWsAMOoD8aWXXtJqs7a2RosWLbT2efz4cQBA//794ejoqLVdZGQkGjVqZNDj//HHH3j06BEcHR111mJnZ4fRo0cDABISEmqcwVDqju769evB8zwOHjyIf//9FyNHjtTqAOvjiy++EP7Nzc01ui4rKyusXLkSGRkZWL16NV599VX4+/uD4zjk5OTgu+++Q/v27fHnn38a/RiEWAqa5k8IQ6q6+KJSqcSUKVPw448/guf5StcrLCw06DE9PDx0tquPKOmabWXKfaqnsleV3dPTE/n5+Xo/vrrz4u3tXek66ll4lXV0auPnomZvb4/hw4djw4YNOHr0qEGDs3Xp1q0bhgwZgl9++QWff/45li1bZnRtwNPrFL311lt46623ADy9PtLWrVsRHR2N3NxcjBs3DlevXq3RYxBibnQEiRCG2NraVrps1apV+OGHH9C0aVNs3boV6enpKCkpAc/z4Hker7zyCgBU2XnSRSIx/duEMftUzxwzdFltqY2fS3nqztDSpUuxd+9e+Pn5oUePHkbv7/PPP4dUKsV3331Xo9N/ujRt2hQzZswQTgleu3YNKSkpJn0MQuoadZAIEYmdO3cCAH788Ue88sor8PT0hI2NjbCc1Q+s5557DsDTywdU5vbt20btMy0trdJ1UlNTNdata8HBwfD19cXBgwfx5MkTvP766zXaX+vWrTFhwgSUlpbi448/NlGVmvr27Sv8Pycnp1Yeg5C6Qh0kQkRCPbZE1wX8rl69iuTk5DquyDTUt8k4cOAA8vLytJbv379fZ3tVOnfujAYNGiA3Nxd79+7VWq6+YjUAhIWFGVG1a
bz11lto0qQJXFxcMG7cuBrvLzo6Gra2tti4caPBp8D0OfKYkZEh/N9cHUtCTIU6SISIhHrQ9HfffadxjZp79+5h3LhxUCgU5iqtRoKDg9GhQwcUFRXh3XffxZMnT4Rld+/excyZMw3ep42NDd555x0AwMyZMzWOQJWVlWHatGm4f/8+vL29MXz48JqHMNLMmTORk5ODrKwsnQPlDfXcc8/h3XffhUqlwtdff23QtgUFBXjhhRewadMmPHr0SGt5amqqcFqwe/fulY7RIoQVNEibEJGYN28eDhw4gJ9++gkJCQl44YUXUFhYiOPHj6NFixYYOnSoSW45Udc4jsPmzZsREhKCLVu24NixY+jRoweKi4uRkJCAgIAAdOvWDYmJiahXr57e+42OjsYff/yBI0eOoHXr1ggLC4O9vT0SExORkZGBJk2aYNeuXQbtkwVz587FTz/9ZPBRNwC4ePEixo0bB2tra3To0AGenp7geR6ZmZk4f/48VCoVPD096/RegoTUFjqCRIhIBAYG4o8//sCgQYPw+PFj7N27F7du3cK7776LxMREjesYsaZt27b4888/MXbsWJSVleGXX37B33//jWnTpiE+Pl64hIEh95OztrbGgQMH8P3336NDhw44efIk9uzZAysrK7z77ru4dOmSzhvGsq5Ro0aYO3euwds1bNgQ586dw+eff46QkBDk5eXhwIED+PXXX5GWloaQkBCsWLECV69eRcuWLWuhckLqFscbOqWFEEIsSFpaGnx9fWFvb4/c3Nxan11GCHk20DsJIcTiPX78WOeg4tu3b2PMmDFQqVQYP348dY4IISZDR5AIIRYvPT0d3t7e8PHxQatWreDg4ICMjAxcuHABcrkcHTp0wIkTJ5g+jUgIsSzUQSKEWLxHjx4hOjoaR48eRUZGBvLz82FnZwc/Pz8MGzYM7777rnCneUIIMQXqIBFCCCGEVEAn7AkhhBBCKqDrIFVCpVLh7t27sLe3N8t9ngghhBBiWjzPo6ioCG5ubtVO6qAOUiXu3r0Ld3d3c5dBCCGEEBPLzMxE8+bNq1yHOkiVsLe3B/D0h2gJM2PUvV6xHdGiXGyhXOwQYyaAcrHG0nIVFhbC3d1d+IyvCnWQKqF+Ih0cHCyig1RWVoZjx44hIiICVlZW5i7HZCgXWygXO8SYCaBcrLHUXPp01miQNiGEEEJIBdRBIoQQQgipgDpIDJHJxHlGlHKxhXKxQ4yZAMrFGlZz0YUiK1FYWIiGDRuioKDAIsYgEUIIIaRmDPlspyNIjFCpVHjw4AFUKpW5SzEpysUWysUOMWYCKBdrWM5FHSRGKJVKJCYmQqlUmrsUk6JcbKFc7BBjJoBysYblXNRBIoQQQgipgDpIhBBCCCEV1Gho+e3bt/Hvv/8iJycHdnZ2cHZ2hr+/P2xsbExVH/l/HMdZzJVITYlysYVysUOMmQDKxRqWcxk8iy0hIQHr16/HkSNHcO/ePa3lVlZW6Ny5M4YOHYoJEyagSZMmJiu2LtEsNkIIIURcDPls17uDtHPnTixYsAA3b94Ez/Nwd3dH586d0bRpUzg6OqKkpAS5ubm4ceMGLl68CLlcDmtra7z22mv45JNP4OrqapJwdcXSOkgqlQqZmZlwd3ev9g7ELKFcbKFc7BBjJoByscbSchny2a7XKbauXbsiKSkJL7zwApYvX44RI0bgueeeq3T9srIynDhxAps3b8bOnTuxfft2bNy4EUOHDjUsCREolUokJyfDzc3NIl5kpkK52GLJuXieR3FxseHbKRR4fP4sbp06iYa9e8Oq/QuAVKr39nZ2dhZ5+sCSn6uaoFxsYTmXXh2kevXq4fDhwwgPD9drp1ZWVujVqxd69eqFlStXYvny5cjIyKhRoYQQUpXi4mI0aNDAoG0GOTfGl6080NzGGoEAcCoe/5bKMftmBvZm5+m1j0ePHqF+/fqGF0wIsWh6dZBOnDhh9AM0atQIn376qdHbE0JIbRjk3Bib2/lqtbtZ18Pmdr547co/eneSCCHiw+YNUp5BHMfB2dnZIg/l1wTlYosl57Kzs8OjR4/0W1mpBCa9CjzM1lok+f9sW8O7A2u2Vnu6zc7OzuBa64IlP1c1QbnYwnIuuhdbJSxtkDYhxHSUyX+idNbb1a5ns+x7SAM61UFFhJC6YPJB2rpkZ2cjJiYG58+fR35+vs7LiHMchyNHjhj7EKQcpVKJlJQUtGzZElIDBpBaOsrFFrHk4nNzTLqeJRLLc1UR5WILy7mM6iBdvnwZ4eHhyMvLQ1UHoFg8pGapVCoVbty4AR8fH+ZeZFWhXGwRSy7O0cmk61kisTxXFVEutrCcy6gO0syZM5Gbm4uPPvoIUVFRaN68OXPBCSHiYtA0/xYtgSbOOscgCZycUdqiJfD4cZW7stRp/oSQmjGqg5SYmIghQ4bgk08+MXU9hBBiFEOn+ZefxSYp18FR/f9R8deOnsHehg2r3Q9N8ydEnIy6alO9evXg4+Nj6lpIFSQSCTw8PJi70FZ1KBdbxJRrb3YeXrvyD+7Kn2i035E/EcUUfzE9V+VRLrawnMuoWWxDhw5Ffn4+EhISaqMmi0Cz2AhhS02upF168Q9w+bmwbuoKrk17UVxJmxCizZDPdqO6dMuWLcNff/2FZcuWGVUgMZxSqcTFixd1zhZkGeViiyXn4jgO9evXN/irQcOGaBwUigxPX9h26Yb6Dg4GbW+pnSNLfq5qgnKxheVceo1BeuONN7Ta2rZtizlz5uCHH35AQECAzp4Yx3FYu3ZtzaskUKlUyMjIQNu2bUU1IJ5ysYVysUOMmQDKxRqWc+nVQVq/fn2ly1JTU5GamqpzGXWQCCGEEMIivTpIaWlptV0HIYQQQojF0KuD5OnpWdt1kGpIJBL4+fkxOROgKpSLLZSLHWLMBFAu1rCcS+9ZbDExMQgPD39mOks0i40QQggRl1qZxRYVFYUWLVqgRYsWiIqKwubNm3Hnzp0aF0v0o1AocObMGSgUCnOXYlKUiy2Uix1izARQLtawnEvvDtLUqVPRrl073L59GzExMRg/fjw8PDzQqlUrvPnmm9ixYweysrJqs9ZnGs/zyM7OrvLedyyiXGyhXOwQYyaAcrGG5Vx632rkq6++AgDk5eXh+PHjSEhIwLFjx/DXX3/hn3/+wX//+18AgL+/P8LCwhAWFobQ0FA0adKkVgonhBBCCKktBt+LrXHjxhgyZAiGDBkCAMjNzcWxY8eEDtO1a9dw/fp1rF69GhKJBGVlZaaumRBCCCGkVtV4WLmjoyNefvllfPPNN/jzzz+xY8cOtGnTBjzPQ6VSmaJGAkAqlSIgIIC5C21Vh3KxhXKxQ4yZAMrFGpZzGXUvNjWFQoFz584hISEBCQkJSExMhFwuB8/z8Pf3R0hICFavXm3wfuVyOT7++GNs2rQJeXl5aN++PRYtWoQ+ffpUuZ2Xlxdu376tc5mvry9SUlL0roFmsRFCCCHiYshnu0Gn2FQqFc6fPy90iE6fPo2SkhIAwPPPP4+oqCiEhIQgODgYLi4uRgeYMGECYmNjMX36
dLRs2RLr169HREQEEhIS0LNnz0q3++qrr/Do0SONttu3b+Ojjz5C3759ja7HEigUCpw4cQLBwcGQyQw+M2qxKBdbKBc7xJgJoFysYTmX3tVGRETg9OnTePToETiOQ7t27TBx4kShQ2SqwdhJSUnYvn07li5dilmzZgEAxo0bh7Zt22L27Nk4c+ZMpduqx0WVt2jRIgDAmDFjTFKfufA8j6KiIiZnAlSFcrGFcrFDjJkAysUalnPp3UE6cOAAJBIJXn75ZXz88cdo165drRQUGxsLqVSKyZMnC202NjaIiorCvHnzkJmZCXd3d733t3XrVnh7e6N79+61US4hhBBCREjvDtKgQYNw8uRJ7N69Gz///DO8vb2FqfwhISFo3ry5SQq6ePEiWrVqpXVusEuXLgCA5ORkvTtIFy9exN9//40PP/yw2nXlcjnkcrnwfWFhIQCgrKxMmIknkUgglUqhVCo1BqCr2xUKhUYvWSqVQiKRVNpecYaf+vBjxQtqyWQyYfvy21hZWUGlUkGpVAptHMdBJpNV2l5Z7ebIVD6P+l8xZFIoFBq5xJJJnUdNLJnK51KpVBr7oUyWlak8MWVSv0eUzyWWTOXfCy0hkyEz6/XuIP3yyy/geR7JycnClP7Y2FisXbsWHMfBy8sLoaGhwpchR3nKu3fvHlxdXbXa1W13797Ve19btmwBoN/ptcWLFyM6Olqr/dChQ7CzswMAeHh4oGPHjrh8+TIyMjKEdfz8/ODv74+kpCRkZ2cL7QEBAfD09MSJEydQVFQktHfr1g0uLi44dOiQxosqLCwMtra2iIuL06ghIiJCGOsVHx8P4OmLMjIyEjk5OUhMTBTWtbe3R3h4ODIzM5GcnCy0Ozs7o3v37khJScGNGzeEdnNnSkhIEHKJLRMAJCQkiC6TVCqFVCoVVSaZTIZu3bohLy8P586do0wWnGnAgAFo3bq18F4ohkyRkZHIy8sD8L/3eDFkKv8eER8fbxGZ1Ac/9FGjWWwqlQoXL17E0aNHcezYMZw6dQpFRUVChyksLEy4gKS+fHx84Ofnp/UkpKamwsfHBytXrsT06dP1qs3DwwMuLi64cOFCtevrOoLk7u6OnJwc4WiWpfbQxfJXPGWiTJSJMlEmylSbmQoLC+Hk5KTfDHXehJRKJR8bG8u3a9eO5ziOl0gkBu+jTZs2fHh4uFb71atXeQD8Dz/8oNd+jh49ygPgly1bZnANPM/zBQUFPAC+oKDAqO1N7cmTJ/y+ffv4J0+emLsUk6JcbKFc7BBjJp6nXKyxtFyGfLbXeM7dvXv3hFNuCQkJSE1N1ei1GcrV1VXnTXDv3bsHAHBzc9NrP1u2bIFEIsErr7xicA2WisWb/emDcrGFcrFDjJkAysUaVnMZ3EHKysrSuLWI+uKLPM+D4zh06NBBuBdbcHCwwQUFBAQgISEBhYWFGoe/1OfQAwICqt2HXC7H7t27ERoaqneHihBCCCFETe8O0ttvv41jx44JA6jUHaK2bdsiNDQUYWFhCAkJQePGjWtU0PDhw7Fs2TKsWbNGuA6SXC5HTEwMAgMDhcHfGRkZKC4uhr+/v9Y+4uLikJ+fz/y1jwghhBBiHnoP0pZInt62rXXr1sIRotDQUJNdILK8kSNHYs+ePZgxYwZ8fX2xYcMGJCUl4ciRI8JRqdDQUBw/flznxaeGDx+Offv2ISsrCw0bNjSqBku71Qj//xfbsre3B8dx5i7HZCgXWygXO8SYCaBcrLG0XLVyq5Ft27YhNDQUTZs2rXGB1dm4cSPmz5+vcS+2ffv26XXKrrCwEL///jsiIyON7hxZKltbW3OXUCsoF1soFzvEmAmgXKxhNZdE3xVHjRpVJ50j4OmVs5cuXYp79+6htLQUSUlJ6Nevn8Y6x44d03n0yMHBASUlJdi9e3ed1FpXFAoF4uLimB3sVhnKxRbKxQ4xZgIoF2tYzqVXB6n8BZqMpWtmGiGEEEKIJdKrg9SyZUu88847SEtLM2jnZWVl2LZtG9q0aYO1a9caVSAhhBBCSF3Tq4P0xRdfYMeOHfD19UVISAi++eYbnD9/Xuc9Tf7991/s3r0bb731FlxdXfHaa6/B09MTr776qsmLJ4QQQgipDXrPYsvPz8eKFSuwdu1a3Lt3DxzHQSKRoFGjRmjUqBFKS0uRm5uL0tLSpzvmOPTr1w8zZ85EeHh4rYaoDZY4i02hUEAmk1nETABToVxsoVzsEGMmgHKxxtJyGfLZbvC92JRKJfbv348jR47gzJkz+Pfff/Hw4UPY2trC2dkZ7dq1Q0hICAYPHgxPT88aBTEnS+wgWdJUSVOhXGyhXOwQYyaAcrHG0nIZ8tmu9yw2NalUioEDB2LlypU4d+4c7ty5g9LSUuTl5eHmzZvYvXs3pk6dynTnyBIpFAokJCQwOROgKpSLLZSLHWLMBFAu1rCcy+AOEiGEEEKI2FEHiRBCCCGkAuogMUQmM/jewkygXGyhXOwQYyaAcrGG1VwGD9J+VljaIG1CCCGE1EytDtIm5qFSqfDgwQOoVCpzl2JSlIstlIsdYswEUC7WsJyLOkiMUCqVSExMhFKpNHcpJkW52EK52CHGTADlYg3LuaiDRAghhBBSAXWQCCGEEEIqqNHQ8qSkJJw/fx75+fk6D59xHIf58+fX5CHI/+M4zmKuRGpKlIstlIsdYswEUC7WsJzLqFlsubm5GDJkCE6fPo2qNuc4jsnzjgDNYiOEEELExpDPdqOOIL333ns4deoUQkNDMX78eDRv3pzZ6xywQqVSITMzE+7u7pBIxHNmlHKxhXKxQ4yZAMrFGpZzGdWr2bdvH7p06YIjR44wediMRUqlEsnJyXBzc2PuRVYVysUWysUOMWYCKBdrWM5lVLUlJSUIDg6mzhEhhBBCRMmoDlJAQADS09NNXAohhBBCiGUwqoO0YMEC7N27F2fPnjV1PaQSHMfB2dlZdEftKBdbKBc7xJgJoFysYTmXXrPYNm7cqNX266+/Yt++fRgzZgxeeOGFSkeDjxs3ruZVmgHNYiOEEELExZDPdr06SBKJRKv3V3EzXctpmr/pKJVKpKSkoGXLlpBKpeYux2QoF1soFzvEmAmgXKyxtFwmn+YfExNjksKI8VQqFW7cuAEfHx+LeJGZCuViC+VihxgzAZSLNSzn0quDNH78+NqugxBCCCHEYrB1UQJCCCGEkDpgVAdp3759ePnll3H37l2dy+/evYuXX34Z+/fvr1Fx5H8kEgk8PDyYu9BWdSgXWygXO8SYCaBcrGE5l1H3YhswYADu3r2LS5cuVbpOx44d8dxzz2Hfvn01KtBcLG2QNiGEEEJqxpDPdqO6dJcuXUJgYGCV6wQGBiI5OdmY3RMdlEolLl68yOyswMpQLrZQLnaIMRNAuVjDci6jOki5ublwcXGpch0nJyfk5OQYVZRcLsecOXPg5uYGW1tbBAYGIj4+Xu/td+zYgW7duqF+/fpo1KgRunfvjqNHjxpVi6VQqVTIyMiASqUydykmRbnYQrnYIcZMAOViDcu5jOogOTs748aNG1Wuc+PGDTg
6OhpV1IQJE7BixQqMGTMGq1atglQqRUREBE6dOlXttgsXLsQrr7wCd3d3rFixAosWLUL79u1x584do2ohhBBCyLNHr2n+FQUHB2P37t24fPky2rdvr7X80qVL2Lt3L15++WWD952UlITt27dj6dKlmDVrFoCnV+Nu27YtZs+ejTNnzlS67dmzZ/HJJ59g+fLlmDFjhsGPTQghhBACGHkEac6cOQCAnj174pNPPkFiYiIyMjKQmJiI6OhoBAUFQSKRYO7cuQbvOzY2FlKpFJMnTxbabGxsEBUVhcTERGRmZla67VdffYVmzZph2rRp4Hkejx49MjychZJIJPDz82NyJkBVKBdbKBc7xJgJoFysYTmXUbPYAGD37t0YP348SkpKNNp5nkeDBg2wceNGDBkyxOD99unTB3fu3MG1a9c02o8cOYLevXtj7969eOmll3Ru6+zsjO7duyMsLAyLFi3Cw4cP0axZM3z44YeYMmVKlY8rl8shl8uF7wsLC+Hu7o6cnBxhpLtEIoFUKoVSqdQ4n6puVygUGrdgkUqlkEgklbaXlZVp1CCTPT2gp1Ao9Gq3srKCSqXSGPzGcRxkMlml7ZXVTpkoE2WiTJSJMok9U2FhIZycnEx3qxFdhg0bhqCgIKxfvx7nz59HQUEBGjVqhC5dumD8+PFwdnY2ar/37t2Dq6urVru6rbJrL+Xl5SEnJwenT5/G0aNHsWDBAnh4eCAmJgbvvvsurKys8Oabb1b6uIsXL0Z0dLRW+6FDh2BnZwcA8PDwQMeOHXH58mVkZGQI6/j5+cHf3x9JSUnIzs4W2gMCAuDp6YkTJ06gqKhIaO/WrRtcXFxw6NAhjRdVWFgYbG1tERcXp1FDREQEHj16hBMnTghtMpkMkZGRyMnJQWJiotBub2+P8PBwZGZmaswiVHceU1JSNMaPmTNTSUkJEhISKBNjmTiOQ0REBHJzc0WTSSaToXHjxvD29kZSUhJlsuBM/fr1w6lTp1BQUCCaTJGRkcjKyhLV82Sp73uFhYXQl9FHkGqLj48P/Pz8tJ6E1NRU+Pj4YOXKlZg+fbrWdpmZmfDw8AAAbN++HaNGjQLwdAR9u3btUFhYWOXpOUs/glRWVob9+/ejT58+sLKyAmC5PXR9MwFASUkJ4uPjhVxiyKRQKFBWVibksrOzE0UmAEKuiIgIoR7WM5XP1b9/f41TAZTJsjKpxcXFabwXsp7JysoKcrkcBw4cEHKJIZNKpUJpaanwXlivXj2zZ6qTI0i1xdbWVqOjolZaWiosr2w74OmTMnz4cKFdIpFg1KhRWLBgATIyMoROVEXW1tawtrbWald/aJcnlUp13nRP/ULRt73ifqtq5zhOZz0SiUTnud3K2iur3RyZyreXzyWWTOX/L6ZMamLNZEhWylT3mdQf3Lrem1nNBECoW9/3eFYylX+PVz+WOTNVVr8uNRo1tWXLFvTp0wfOzs6wtraGs7Mz+vTpg61btxq9T1dXV9y7d0+rXd3m5uamcztHR0fY2NigSZMmWj9I9TWb8vLyjK6LEEIIIc8OozpISqUSw4YNw7hx43DkyBE8fvwYbm5uePz4MY4cOYKxY8di2LBhRl0YKiAgADdv3tQ6T3ju3DlhuS4SiQQBAQHIzs7GkydPNJapxy0ZOy7KEkilUgQEBOjsRbOMcrGFcrFDjJkAysUalnMZ1UH6+uuvsWfPHvTo0QOnT59GcXEx0tLSUFxcjDNnzqBnz5745Zdf8M033xi87+HDh0OpVGLNmjVCm1wuR0xMDAIDA+Hu7g4AyMjIwPXr1zW2HTVqFJRKJTZs2CC0lZaWYsuWLXj++ecrPfrEAolEAk9PTyanSlaFcrGFcrFDjJkAysUapnPxRujQoQPv5+fHP3nyROfyJ0+e8P7+/nyHDh2M2T0/YsQIXiaT8e+//z7/448/8t27d+dlMhl//PhxYZ2QkBC+YvnFxcV8mzZteCsrK37WrFn8119/zb/44ou8VCrl4+LiDKqhoKCAB8AXFBQYlcHUysrK+CNHjvBlZWXmLsWkKBdbKBc7xJiJ5ykXaywtlyGf7UZ16W7evIlBgwZVOVjrpZdews2bN43qtG3cuBHTp0/Hpk2bMHXqVJSVlWHfvn0IDg6ucjtbW1scPXoUr776KtatW4f3338fEokEv//+OwYMGGBULZaC53kUFRVpjMwXA8rFFsrFDjFmAigXa1jOZdQstnr16uHx48dVrvP48WPUq1fPqKJsbGywdOlSLF26tNJ1jh07prPdxcUF69evN+pxCSGEEEIAI8cgdezYETt37qz0oo337t3Dzp078cILL9SoOEIIIYQQczDqQpG//fYbBg8ejGbNmmHmzJkICQlB06ZNkZWVhWPHjmHFihXIysrCr7/+ioEDB9ZG3bWusLAQDRs21OtiUnVBpVIhJycHTk5ObA52qwTlYgvlYocYMwGUizWWlsuQz3ajr6S9YsUKfPDBBxpXwwSenm+UyWT44osvMGPGDGN2bREsrYNECCGEkJox5LPd6O7ce++9h+vXr2PhwoUYMmQIwsPDMWTIEHzyySe4fv06050jS1RWVobff/9d6/LvrKNcbKFc7BBjJoBysYblXDW61UiLFi0wf/58U9VCqqHrnkRiQLnYQrnYIcZMAOViDau5THJCUKFQIC8vj9kfAiGEEEJIeUZ3kJRKJVauXIkOHTrAxsYGTk5OsLGxQYcOHfDVV19RZ4kQQgghzDJqkPajR4/Qr18/nD17FhKJBO7u7sIstszMTKhUKnTr1g0HDx5E/fr1a6PuWmdpg7TVF9uyt7cHx3HmLsdkKBdbKBc7xJgJoFyssbRctT5I++OPP0ZiYiJeeeUV3Lp1C6mpqUhMTERqaipu3bqF0aNH48yZM/j444+NCkB0s7W1NXcJtYJysYVysUOMmQDKxRpWcxnVQdq5cyc6d+6MzZs3w8PDQ2OZh4cHtmzZgk6dOmHHjh0mKZI8HecVFxcnulOXlIstlIsdYswEUC7WsJzLqA7Sw4cP0bt37yrX6d27N3Jzc40qihBCCCHEnIzqILVs2RIPHjyocp3s7Gz4+voaVRQhhBBCiDkZ1UGaNm0aduzYgatXr+pcfuXKFWzfvh3Tp0+vSW2EEEIIIWZh1Cy2EydOYPny5Th06BDGjx+Pnj17CrPYTp48iY0bN6Jfv3547733tLYNDg42SeG1zRJnsSkUCshkMouYCWAqlIstlIsdYswEUC7WWFquWr8Xm0QiAcdxUG9aPrSutvIq3rvNUlliB8mSpkqaCuViC+VihxgzAZSLNZaWy5DPdqNuNfLxxx9bRNBniUKhQEJCAiIiImBlZWXuckyGcrGFcrFDjJkAysUalnMZ1UFauHChicsghBBCCLEcJrkXGyGEEEKImBh1BEnt4sWL2LZtG65fv47i4mIcPnwYAHD79m2cO3cOvXv3hqOjo0kKJYBMVqOny2JRLrZQLnaIMRNAuVjDai6jBmkDwOzZs7F8+XKNQdnqAdjp6enw9fXF8uXLMW3aNNNVW4csbZA2IYQQQmqm1u/FFhMTg2
XLlmHgwIG4fPky5s6dq7Hcy8sLXbp0wd69e43ZPdFBpVLhwYMHUKlU5i7FpCgXWygXO8SYCaBcrGE5l1EdpO+//x6tW7fG7t270bZtW9SrV09rHX9/f6SkpNS4QPKUUqlEYmIiM5dJ0BflYgvlYocYMwGUizUs5zKqg3Tt2jX06dOnyvOKTZs2rfZ2JIQQQgghlsioDpJMJsOTJ0+qXOfu3bto0KCBUUURQgghhJiTUR2kdu3a4ejRo5UeMlPPaOvUqVONiiP/w3GcxVyJ1JQoF1soFzvEmAmgXKxhOZdRs9jWrVuHiRMnIioqCt9++y2WLFmCTz75BEqlEoWFhZg4cSJ2796NHTt2YPjw4bVRd62jWWyEEEKIuNT6LLY33ngDo0ePxtq1a+Hs7Iy1a9cCALp06YLnnnsOsbGxGD9+PLOdI0ukUqlw+/ZtJmcCVIVysYVysUOMmQDKxRqWcxl9Je2tW7fixx9/hLe3N+7cuQOe5/HHH3/Aw8MDq1evxrp160xZ5zNPqVQiOTmZyZkAVaFcbKFc7BBjJoBysYblXDW6vOWkSZMwadIklJSUIC8vDw4ODjQwmxBCCCHMM8m92GxtbeHm5mayzpFcLsecOXPg5uYGW1tbBAYGIj4+vtrtFi5cCI7jtL5sbGxMUhchhBBCng0WeYOUCRMmIDY2FtOnT0fLli2xfv16REREICEhAT179qx2+9WrV2t01qRSaW2WWyc4joOzszOTMwGqQrnYQrnYIcZMAOViDcu5jL4XW21JSkpCYGAgli5dilmzZgEASktL0bZtW7i4uODMmTOVbrtw4UJER0cjOzsbTk5ONaqDZrERQggh4lLrs9hqU2xsLKRSKSZPniy02djYICoqComJicjMzKx2HzzPo7CwEBbW96sRpVKJ69evMznQrSqUiy2Uix1izARQLtawnMviTrFdvHgRrVq10urZdenSBQCQnJwMd3f3KvfRokULPHr0CPXr18eQIUOwfPlyNG3atMpt5HI55HK58H1hYSEAoKysDGVlZQAAiUQCqVQKpVKpMWVR3a5QKDQ6ZVKpFBKJpNJ29X7V1LduUSgUWu1KpRI3btyAh4cHrKysAABWVlZQqVQaLzyO4yCTySptr6x2c2QCnv7cy+cSQyaFQoGysjIhl52dnSgyARBy+fj4gOM4UWQqn8vb21ujFspkWZnUKr4Xsp7JysoKCoVCI5cYMqlUKo33+Hr16pk9U8VcVbG4DtK9e/fg6uqq1a5uu3v3bqXbNm7cGFOmTEG3bt1gbW2NkydP4rvvvkNSUhL++OOPKg+nLV68GNHR0Vrthw4dgp2dHQDAw8MDHTt2xOXLl5GRkSGs4+fnB39/fyQlJSE7O1toDwgIgKenJ06cOIGioiKhvVu3bnBxccGhQ4c0XlRhYWGwtbVFXFycRg0RERF49OgRAAiD1WUyGSIjI5GTk4PExERhXXt7e4SHhyMzMxPJyclCu7OzM7p3746UlBTcuHFDaDdnppKSEiQkJAi5xJYJABISEkSXSU1MmdRv8g8fPsT58+cpkwVn6tu3LwBoTNxhPVNkZCQePnyokUsMmcq/R8THx1tEJvXBD30YNQZp48aNaNq0Kfr162foptXy8fGBn5+f1pOQmpoKHx8frFy5EtOnT9d7f1u3bsWYMWOwePFifPDBB5Wup+sIkru7O3JycoSOlTl76GVlZdi/fz/69OkjqiNIJSUliI+PF3KJIZP6CJI6l9iOIMXHxyMiIkKoh/VM5XP1798fEsn/Rh5QJsvKpBYXF6fxXsh6JisrK8jlchw4cEDIJYZMKpUKpaWlwnuhJRxBKiwshJOTk15jkIw6ghQVFYUpU6bUSgfJ1tZWo6OiVlpaKiw3xKuvvoqZM2fi8OHDVXaQrK2tYW1trdWu/tAuTyqV6pwZp36h6Ntecb9VtUulUnh4eMDa2lrjsSUSicabX3XtldVujkzA0597xVysZ7KysoJEIhFyVVU7S5nU9Xp4eAh1iyGTumYPDw/IZDKdtVAmy8gEPB3Touu9EGA3k3rfhrzHs5BJIpEY9B5fF5kqq1/nfvResxxXV1edvXpTcHV1xZ07d7Ta7927BwBwc3MzeJ/u7u7Izc2tcW3mJJVK0bFjR3OXYXKUiy2Uix1izARQLtawnMuoWWyDBg1CfHy8ziM9NRUQEICbN29qnSc8d+6csNwQPM8jPT0dzs7OpirRLJRKJS5evMjkTICqUC62UC52iDETQLlYw3IuozpIn332GerXr4+XX34ZV69eNWlBw4cPh1KpxJo1a4Q2uVyOmJgYBAYGCjPYMjIycP36dY1tyw/WUlu9ejWys7PRv39/k9ZZ11QqFTIyMpi84V9VKBdbKBc7xJgJoFysYTmXUafYOnbsCLlcjuTkZBw4cAA2NjZwcXHRulImx3G4deuWQfsODAzEiBEjMHfuXDx48AC+vr7YsGED0tPTsXbtWmG9cePG4fjx4xoDsTw9PTFq1Ci0a9cONjY2OHXqFLZv346AgAC8+eabxkQlhBBCyDPIqA6SSqVCvXr14OHhodFecUKcsRdq3LhxI+bPn49NmzYhLy8P7du3x759+xAcHFzldmPGjMGZM2ewe/dulJaWwtPTE7Nnz8aHH34oTNUnhBBCCKmOUR2k9PR0E5ehycbGBkuXLsXSpUsrXefYsWNabT/99FMtVmVeEokEfn5+Okf+s4xysYVysUOMmQDKxRqWc1ncvdgsBd2LjRBCCBGXOr0X27Vr1/Dzzz9j06ZNNd0VqYJCocCZM2dq7fIK5kK52EK52CHGTADlYg3LuYzuIJ0/fx4BAQFo164dRowYgQkTJgjLTpw4ATs7O+zdu9cUNRI8Hc+VnZ0tqhvwApSLNZSLHWLMBFAu1rCcy6gO0tWrVxEeHo60tDTMmDEDAwYM0FgeFBQEJycn7Nq1yyRFEkIIIYTUJaM6SAsWLAAA/Pnnn1i2bBlefPFFjeUcx6Fbt24aN0gkhBBCCGGFUR2k48ePY9iwYfD19a10HQ8PD+H2IKTmpFIpAgICdN6PhmWUiy2Uix1izARQLtawnMuoaf5FRUVwcXGpcp2SkhImLy1uqSQSCTw9Pc1dhslRLrZQLnaIMRNAuVjDci6jjiC5u7vjypUrVa5z4cIF+Pj4GFUU0aZQKHD06FEmZwJUhXKxhXKxQ4yZAMrFGpZzGdVBGjhwIA4dOoTDhw/rXL5z506cPXsWQ4YMqUltpBye51FUVMTkTICqUC62UC52iDETQLlYw3Iuo06xzZs3D7GxsYiIiMD48eNx//59AMD333+PxMREbNu2DV5eXnjvvfdMWiwhhBBCSF0wqoPk7OyM48ePY+zYsRo3kJ0yZQqApzec3bZtGxo2bGiaKgkhhBBC6lCNbzWSnJyMs2fPIjc3Fw4ODggMDNSa9s8iS7vViEqlQk5ODpycnJi8p01lKBdbKBc7xJgJoFyssbRchny2073YKmFpHSRCCCGE1Eyd3ovt4cOHOHr0KPbs2YOjR4/i4cOHNd0l0aGsrAy///47ysrKzF2KSVEutlAud
ogxE0C5WMNyLqPGIAFAeno6pk2bht9//11jdDrHcRg4cCC++uoreHl5maJG8v9YnCapD8rFFsrFDjFmAigXa1jNZVQH6datW+jRowcePHiAli1bokePHmjatCmysrJw5swZ7N27F2fPnsWZM2fQokULU9dMCCGEEFKrjOogzZkzB9nZ2fjhhx8wadIkcBwnLON5HmvWrMHbb7+NOXPm0A1rCSGEEMIcowZpN27cGKGhodizZ0+l6wwePBgnTpxAXl5ejQo0F0sbpK2+2Ja9vb1Gh5R1lIstlIsdYswEUC7WWFquWh+krVQq0aZNmyrXadu2Ld2LzcRsbW3NXUKtoFxsoVzsEGMmgHKxhtVcRnWQXnjhBVy9erXKda5evYrOnTsbVRTRplAoEBcXx+xgt8pQLrZQLnaIMRNAuVjDci6jOkifffYZ9u/fj//+9786l69ZswYHDx7EokWLalQcIYQQQog5GDVI+8iRIwgLC8Obb76J5cuXa8xiO336NG7evIl+/frh8OHDGje05TgO8+fPN1nxhBBCCCG1wagO0sKFC4X/37hxAzdu3NBa58CBAzhw4IBGG3WQCCGEEMICo2axHT9+3OgHDAkJMXrbumSJs9gUCgVkMplFzAQwFcrFFsrFDjFmAigXaywtlyGf7UYdQWKlkyM2JSUlsLe3N3cZJke52EK52CHGTADlYg2rucx/a12iF4VCgYSEBCZnAlSFcrGFcrFDjJkAysUalnNRB4kQQgghpALqIBFCCCGEVEAdJIbIZEYNGbN4lIstlIsdYswEUC7WsJrLqFlstU0ul+Pjjz/Gpk2bkJeXh/bt22PRokXo06ePQfvp06cPDh8+jHfeeQfffvutQdta2iw2QgghhNRMrd+LrbZNmDABK1aswJgxY7Bq1SpIpVJERETg1KlTeu/j559/RmJiYi1WWbdUKhUePHgAlUpl7lJMinKxhXKxQ4yZAMrFGpZzWVwHKSkpCdu3b8fixYuxdOlSTJ48GUePHoWnpydmz56t1z5KS0sxc+ZMzJkzp5arrTtKpRKJiYmiuwEw5WIL5WKHGDMBlIs1LOeyuA5SbGwspFIpJk+eLLTZ2NggKioKiYmJyMzMrHYfX375JVQqFWbNmlWbpRJCCCFEpPQaOSWRSIy6AibHcQZf++DixYto1aqV1rnBLl26AACSk5Ph7u5e6fYZGRlYsmQJ1q1bB1tbW70fVy6XQy6XC98XFhYCAMrKylBWVgbg6c9BKpVCqVRqHC5UtysUCpQf0iWVSiGRSCptV+9XTT2QreLPTCaTCduX38bKygoqlUqjZ85xHGQyWaXtldVujkzl86j/FUMmhUKhkUssmdR51MSSqXwulUqlsR/KZFmZyhNTJvV7RPlcYslU/r3QEjJVzFUVvTpIwcHBWh2kvLw8XL58GVKpFO7u7sLNajMzM6FUKtG+fXs0btxY70LU7t27B1dXV612ddvdu3er3H7mzJno2LEjRo8ebdDjLl68GNHR0Vrthw4dgp2dHQDAw8MDHTt2xOXLl5GRkSGs4+fnB39/fyQlJSE7O1toDwgIgKenJ06cOIGioiKhvVu3bnBxccGhQ4c0XlRhYWGwtbVFXFycRg0RERF4/PgxACA+Ph7A0xdlZGQkcnJyNMZa2dvbIzw8HJmZmUhOThbanZ2d0b17d6SkpGjcO8+cmUpKSpCQkCDkElsmAEhISBBdJuDpG5qYMslkMtjb2yM3NxdJSUmUyYIz9evXD7a2tsJ7oRgyRUZGIjc3F8D/3uPFkKn8e0R8fLxFZFIf/NCHUbPY/v33X/To0QNBQUH4/PPP4eHhISzLyMjA3Llzcfr0aZw6dQrNmzc3aN8+Pj7w8/PTehJSU1Ph4+ODlStXYvr06Tq3TUhIQK9evXDu3Dm8+OKLAJ6+keszi03XESR3d3fk5OQIR7MstYculr/iKRNlokyUiTJRptrMVFhYCCcnJ71msRnVQRo9ejTS09Nx9uzZStfp2rUrvL29sW3bNoP23bZtWzRt2hRHjhzRaL927RratGmDH374AW+++abWdgqFAh07dsQLL7yADRs2CO36dpAqsrRp/iqVCpmZmXB3d4dEYnFDx4xGudhCudghxkwA5WKNpeWq9Wn+hw8fRq9evapcJzw8HIcPHzZ4366urrh3755Wu7rNzc1N53YbN27EjRs38OabbyI9PV34AoCioiKkp6ejuLjY4HoshVKpRHJyMpMzAapCudhCudghxkwA5WINy7mM6iCVlpbq7MSUd/fuXZSUlBi874CAANy8eVPrPOG5c+eE5bpkZGSgrKwMPXr0gLe3t/AFPO08eXt749ChQwbXQwghhJBnj1EdpE6dOmH79u2VXojxzJkz2LFjhzAOyBDDhw+HUqnEmjVrhDa5XI6YmBgEBgYKM9gyMjJw/fp1YZ3Ro0djz549Wl/A00Fke/bsQWBgoMH1EEIIIeTZY9QNUj777DP06tULQUFBeOmll9CzZ0+4uLjgwYMHOHnyJPbt2weZTIZFixYZvO/AwECMGDECc+fOxYMHD+Dr64sNGzYgPT0da9euFdYbN24cjh8/LgzE8vf3h7+/v859ent7Y8iQIcZEtRgcx8HZ2dmoyy1YMsrFFsrFDjFmAigXa1jOZfS92I4cOYLJkycjLS3t6Y44TuiseHt7Y82aNdWOU6pMaWkp5s+fj82bNwv3Yvv000/Rr18/YZ3Q0FCNDlJlxDJImxBCCCE1Y8hne41uVsvzPE6dOoVLly6hoKAADRs2RIcOHdCzZ08me4vlWVoHSalUIiUlBS1btoRUKjV3OSZDudhCudghxkwA5WKNpeUy5LPdqFNsahzHISgoCEFBQTXZDdGDSqXCjRs34OPjYxEvMlOhXGyhXOwQYyaAcrGG5Vw16iABT69PdP36dTx+/Bhjx441RU2EEEIIIWZl9FWbzp8/j4CAALRr1w4jRozAhAkThGUnTpyAnZ0d9u7da4oaCSGEEELqlFEdpKtXryI8PBxpaWmYMWMGBgwYoLE8KCgITk5O2LVrl0mKJE8vre7h4WERVyI1JcrFFsrFDjFmAigXa1jOZdQg7eHDh+PgwYO4ePEifH19ER0djU8++UTjSpmjRo3CpUuXNK5VxBJLG6RNCCGEkJqp9VuNHD9+HMOGDYOvr2+l63h4eFR7tW2iP6VSiYsXLzJ5ufaqUC62UC52iDETQLlYw3IuozpIRUVFcHFxqXKdkpISJn8glkqlUiEjI0PjjsZiQLnYQrnYIcZMAOViDcu5jOogubu748qVK1Wuc+HCBfj4+BhVFCGEEEKIORnVQRo4cCAOHTqEw4cP61y+c+dOnD17lvnbexBCCCHk2WTUdZDmzZuH2NhYREREYPz48bh//z4A4Pvvv0diYiK2bdsGLy8vvPfeeyYt9lkmkUjg5+fH5EyAqlAutlAudogxE0C5WMNyLqNvNZKamoqxY8ciMTFRa1lgYKDQSWIVzWIjhBBCxKXWZ7EBQIsWLXD69GlcuHAB33//PRYtWoSvv/4a586dQ2JiItOdI0ukUChw5swZ
KBQKc5diUpSLLZSLHWLMBFAu1rCcq8a3GgkICEBAQIAJSiFV4Xke2dnZqMG9hS0S5WIL5WKHGDMBlIs1LOdi76QgIYQQQkgtM/oIUlFREdauXYtLly7h7t27KCsr01qH4zgcOXKkRgUSQgghhNQ1ozpI58+fx4ABA5CXl1flYTOO44wujGiSSqUICAiAVCo1dykmRbnYQrnYIcZMAOViDcu5jJrF1r17dyQlJWHx4sV45ZVX4OrqymT4qtAsNkIIIURcan0W28WLFzF69Gi8//77aN68ueg6R5ZIoVDg6NGjTM4EqArlYgvlYocYMwGUizUs5zKqg+To6AhnZ2dT10KqwPM8ioqKmJwJUBXKxRbKxQ4xZgIoF2tYzmVUB2nIkCE4evQokzefI4QQQgipjlEdpMWLF8PKygpjxozBnTt3TF0TIYQQQohZGX2rkQsXLqB3794oKChA48aNdQ524jgOt27dqnGR5mBpg7RVKhVycnLg5OTE5D1tKkO52EK52CHGTADlYo2l5TLks92oDtKRI0fw0ksvobS0FFZWVnBxcYFMpvuKAWlpaYbu3iJYWgeJEEIIYQmvVEJ1JRl8bg44RydI2gWAM/OkLkM+2426DtKcOXPA8zx27NiB4cOH0/WO6kBZWRkOHTqEvn37wsrKytzlmAzlYgvlYocYMwGUy1x4nkdxcbH+GySeAP/Td+AeZv+vrYkzMOkdoFuw3ruxs7MzWx/DqA7StWvX8Nprr2HEiBGmrodUgcVpkvqgXGyhXOwQYyaAcplDcXExGjRooNe6g5wbY3M7XwCaF4xW5TwAFi/Aa1f+wd7sPL329ejRI9SvX9/wgk3AqBOCzs7OsLW1NXUthBBCCGGYBMCXrTye/r/CkR/191+08mDiRrBGHUEaM2YMdu3ahZKSEuooEUIIISJna2uLrKysateTXLsC20XzKl/OcXC3sUZWwmGonm+n1+Oai1GDtJ88eYJRo0YhNzcXn3/+OTp06KD3oTdWWNogbfXFtuzt7UU15otysYVysUOMmQDKZS6PHz/W63N+RFNHxLT1rXa91//6B7uycqtdz9Sn2Gp9kLa6R8fzPIKDKx9sxXGcRZ9TZY1Yj9ZRLrZQLnaIMRNAuSzZfXmZSdczJ6NOAwYFBSE4OBghISEIDg6u9CsoKMioouRyOebMmQM3NzfY2toiMDAQ8fHx1W63Z88e9OvXD25ubrC2tkbz5s0xfPhw/PXXX0bVYUkUCgXi4uJE1+GkXGyhXOwQYyaAcpmLnZ0dHj16VO3X/tv/Pp2tVhUnZ+y//a9e+7Ozs6ubgDoYdQTp2LFjJi5D04QJExAbG4vp06ejZcuWWL9+PSIiIpCQkICePXtWut2VK1fQuHFjTJs2DU5OTrh//z7WrVuHLl26IDExER06dKjVugkhhBAx4jhO71NdiikzIY/+oNLl1u/MhMwChq5Ux6gOUm1KSkrC9u3bsXTpUsyaNQsAMG7cOLRt2xazZ8/GmTNnKt32448/1mqbOHEimjdvjtWrV+OHH36otboJIYQQAsiCwoAFS/DkuxXgcx4I7ZxzU9R7e8bT5QywuA5SbGwspFIpJk+eLLTZ2NggKioK8+bNQ2ZmJtzd3fXen4uLC+zs7JCfn18L1RJCCCGkIllQGKTdg/Ek+Q9cPHoEHcN7oV5AZ7NfSdsQRs1iCw8P12/nHIcjR44YtO8+ffrgzp07uHbtmkb7kSNH0Lt3b+zduxcvvfRSlfvIz89HWVkZ7t+/j6+++grr1q3DmjVrMGnSpEq3kcvlkMvlwveFhYVwd3dHTk6OMNJdIpFAKpVCqVRCpVIJ66rbFQoFyv84pVIpJBJJpe1lZZqD1NS3a6l4Dlomk4HneZSWlkImkwkzHKysrKBSqaBUKoV1OY6DTCartL2y2s2RCXh69ViFQiHkEkMm9X7UuerVqyeKTACEXLa2tuB5XhSZ1Lk4joNEItGohTJZViZ1+5MnT8BxnPBeyHomKysrKJVKyOVy4b1QDJlUKhUUCoXwXiiRSMyeqbCwEE5OTrU3i626MUgcxwm/nIa6d+8eXF1dtdrVbXfv3q12H127dsWNGzcAAA0aNMBHH32EqKioKrdZvHgxoqOjtdoPHTokDBLz8PBAx44dcfnyZWRkZAjr+Pn5wd/fH0lJScjO/t9l1QMCAuDp6YkTJ06gqKhIaO/WrRtcXFxw6NAhjRdVWFgYbG1tERcXp1FDREQEiouLNX7uMpkMkZGRyMnJQWJiotBub2+P8PBwZGZmIjk5WWh3dnZG9+7dkZKSIvxszJ2ppKQECQkJlImxTFKpVHSZZDIZgoKCUFJSgrNnz1ImC84UERGBu3fv4vLly6LJpP59EtPzZKnvEYWFhdCXUUeQKlNYWIgLFy5g3rx5aN68ObZt2wapgYfTfHx84Ofnp/UkpKamwsfHBytXrsT06dOr3EdiYiIKCwuRmpqKmJgYBAcHY/HixVXe38bSjyCVlZVh//796NOnj5BDDEdbSkpKEB8fL+QSQyaFQoGysjIhl52dnSgyARByRURECPWwnql8rv79+2vccZwyWVYmtbi4OI33QtYzWVlZQS6X48CBA0IuMWRSqVQoLS0V3gvr1atn9ky1fgSpMg4ODggNDcXBgwfRrl07fPbZZzoHTlfF1tZWo6OiVlpaKiyvTrdu3YT/jx49Gq1btwYALFu2rNJtrK2tYW1trdWu/tAuTyqV6uz4qV8o+rZX1mHT1V7+tFr55RKJROPNr7r2ymo3R6by7eVziSVT+f+LKZOaWDMZkpUy1X0m9Qe3rvdmVjMBEOrW9z2elUzl3+PVj2XOTIbcCLhWbodib2+PAQMGICYmxuBtXV1dce/ePa12dZubm5tB+2vcuDHCw8OxZcsWg2shhBBCyLOp1u4XJ5FIdHZ0qhMQEICbN29qnSc8d+6csNxQJSUlKCgoMHg7S1NZr5h1lIstlIsdYswEUC7WsJrLpGOQ1FJTU9G1a1c4Ojri+vXrBm177tw5dO3aVeM6SHK5HG3btkWTJk2EQWwZGRkoLi6Gv7+/sO2DBw/g4uKisb/09HS0b98eAQEBOHHihN51WNq92AghhBBSM7V+L7Y33nhDZ7tCocCdO3dw6tQplJWV4ZNPPjF434GBgRgxYgTmzp2LBw8ewNfXFxs2bEB6ejrWrl0rrDdu3DgcP35cYyBWu3bt0KtXLwQEBKBx48ZISUnB2rVrUVZWhiVLlhge1IKoVCrk5OTAyclJ57lbVlEutlAudogxE0C5WMN0Lt4IHMdV+eXv78//9NNPxuya53meLykp4WfNmsU3a9aMt7a25l988UX+wIEDGuuEhITwFctfsGAB37lzZ75x48a8TCbj3dzc+NGjR/OXL182uIaCggIeAF9QUGB0DlN68uQJ/8svv/BPnjwxdykmRbnYQrnYIcZMPE+5WGNpuQz5bDfqCFJaWprOdolEgkaNGsHe3t74HhueXjl76dKlWLp0aaXr6LoW08KFC7Fw4cIaPTYhhBBCiFEdJE9PT1PXQQghhBBiMUw6tLywsBDnzp2
DjY0NevbsadSVtIluHMfB3t5edD9TysUWysUOMWYCKBdrWM5l1Cy2n376CZs3b8Yvv/yCxo0bAwAuXbqEAQMGICsrC8DTizWWv00Ha2gWGzEWr1RCdSUZfG4OOEcnSNoFMHWDRkIIEStDPtuNGlK+adMmyOVyoXMEADNnzsSDBw/w+uuvIyIiAomJiVi9erUxuyc6qFQq3L59W+Ny62IgtlyKkwkoGTMEpbPehvzzj1E6622UjBkCxcmE6jdmgNieLzUx5hJjJoBysYblXEZ1kG7evIkOHToI3z98+BAJCQmYOHEi/vvf/+K3337Diy++SFevNiGlUonk5GSN+9eIgSXn4nkejx8/1v/r8H7Ioz+AKueB5n5yHkAe/QEeH96v136MOKhbZyz5+aoJMeYSYyaAcrGG5VxGjUHKz8+Hs7Oz8P3JkycBAC+//LLQ1rNnT6xbt66G5RFiPsXFxWjQoIFe60oAXOvRAW7W9SDRca5dxfO4s3Au2py+hOr+jnr06BHq169veMGEEEJMxqgjSE2aNNG4jciRI0cglUrRo0cPoY3nea27ARMiVj0a2aO5jbXOzhEASDgO7jbW6NGoZpfAIIQQUjeMOoLUvn17/Prrr5gxYwZsbGywdetW9OjRQ+Ov3vT0dLi6upqs0Gcdx3FwdnZmciZAVSw5l62trTDpoDrSM8eBb5dVu96emLVQdg+p9nEtlSU/XzUhxlxizARQLtawnMuoWWwJCQno3bu3Rtsvv/yCl156CcDTQVmurq4IDw/Htm3bTFNpHaNZbOTx48d6n2ILamSP/Z1aV7vegD//xsn8oirXoVNshBBSO2p9FltYWBj27t2LoUOHYujQodixY4fQOQKA06dPw83NTWNMEqkZpVKJ69evMznQrSpiyXU6vwj/lsqhquTvDRXPI7NUjtPVdI4snVier4rEmEuMmQDKxRqWcxl9ocjIyEhERkbqXBYUFISLFy8aXRTRplKpcOPGDfj4+EAqomvqWHIuOzs7PHr0SP8NEk8ASxaCB1DxYLKE4+C+cDEKuwXr9biWypKfr5oQYy4xZgIoF2tYzmXSK2kTIiYcxxl2qqv3ACisbfDkuxXgy03155ybot7bMyALCquFKgkhhNSGGnWQkpKScP78eeTn5+s8fMZxHObPn1+ThyCEKbKgMEi7B+NJ8h+4ePQIOob3Qr2AznQlbUIIYYxRHaTc3FwMGTIEp0+frvKidtRBMh2JRAIPDw9IJEYNG7NYYszFSaWQBXSGtaQeZO3bi6pzJMbnCxBnLjFmAigXa1jOZdQstgkTJmDjxo0IDQ3F+PHj0bx5c8hkuvtaISFVT2m2VDSLjRBCCBEXQz7bjTqCtG/fPnTp0gVHjhxh8toGLFIqlbh8+TLat2/P3EC3qlAutlAudogxE0C5WMNyLqOOeZWUlCA4OJg6R3VIpVIhIyODyRv+VYVysYVysUOMmQDKxRqWcxnVQQoICEB6erqJSyGEEEIIsQxGdZAWLFiAvXv34uzZs6auhxBCCCHE7Iwag3T//n1ERkYiJCQEY8aMwQsvvFDpYKdx48bVqEDylEQigZ+fH5MzAapCudhCudghxkwA5WINy7mMmsUmkUjAcZzGFP+K45F4ngfHcUxeXhygWWyEEEKI2NT6LLaYmBijCiPGUygUSEpKQpcuXSq9pAKLKBdbKBc7xJgJoFysYTmXUdWOHz/e1HWQavA8j+zs7CovzMkiysUWysUOMWYCKBdrWM7FVndOBHieR3FxsWHbKBR4fP4s7C+ex+MmDWHV/gXAgOtJ2NnZ0SUZCCGEEAPUqIOUnp6OLVu2IDk5GYWFhXBwcEBAQADGjBkDLy8vE5UoLsXFxWjQoIHe6w9ybowvW3mguY01AgHgVDz+LZVj9s0M7M3O02sfjx49Muymq4QQQsgzzqhB2gCwatUqzJ49GwqFQuvQmZWVFb788ktMmzbNJEWaQ20N0n78+LHeHaRBzo2xuZ0vAEBS7giQ6v9/3q9d+UevTpKldpB4pRKKyxeRk3IDTi39IGvfUTT3LVOpVMjMzIS7uzuTszcqQ7nYIcZMAOVijaXlMuSz3agO0r59+zBo0CA4OTlhxowZCAsLg6urK+7fv4+EhASsWLECDx8+xN69exEZGWl0EHOqrQ6S3qfYlEpg0qvAw+zK13FyBtZsrfZ0myWeYlOcTMCT71aAz3kgtHFOLqj3znuQBYWZsTJCCCFiZchnu1HduRUrVsDR0REXLlzA3Llz0bVrV3h6eiIwMBAffPAB/vzzTzRu3BgrVqwwKoCYcRyH+vXrV/tlk5pSdecIAHKyYZOaUu2+LLFzJI/+QKNzBAB8zgPIoz+A4mSCmSozHYVCgaNHj0KhUJi7FJOiXOwQYyaAcrGG5VxGjUG6cOECxowZg+bNm+tc7u7ujpEjR2Lr1q01Kk6M9D6CdO+OXvsrvXcHaOlf5Tq1fQTJoIHnSiXw7fIqV5F/txzyDp2YPDKmxvM8ioqKmJy5URXKxQ4xZgIoF2tYzmXUEaQnT55UO6alQYMGePLkiVFFyeVyzJkzB25ubrC1tUVgYCDi4+Or3e7nn3/GqFGj0KJFC9jZ2cHPzw8zZ85Efn6+UXXUBvUg7eq+Brz6ml77G/Dqa9Xuy9BZc7WVqUGDBhjg2VyvI2MDPJubPRchhJBnl1EdpFatWuG3336r9JCZQqHAvn370KpVK6OKmjBhAlasWIExY8Zg1apVkEqliIiIwKlTp6rcbvLkyfj777/x2muv4euvv0b//v3x7bffolu3bigpKTGqFnM5nV+Ef0vlwoDsilQ8j8xSOU7nF9VxZdoM+cugmbWVydZj8S8SQgghbDDqFNu4ceMwa9Ys9OvXD19++SU6deokLPvjjz8wd+5c3LhxA8uWLTN430lJSdi+fTuWLl2KWbNmCY/Xtm1bzJ49G2fOnKl029jYWISGhmq0derUCePHj8eWLVswceJEg+sxNTs7Ozx69Ei/lRNPAEsWggdQ8USShOPgvnAxCrsF6/WYluK+vMyk61kqqVSKbt26QSqSWXlqlIsdYswEUC7WsJzLqFlsSqUSw4YNw969e8FxHOzs7ODi4oIHDx6guLgYPM9j8ODB2L17t8HT+mbPno0VK1YgNzdXY4T54sWLMW/ePGRkZMDd3V3v/RUVFcHBwQHvvfceli+veuxLeZZyLzads72cm6Le2zMsZraXIZcukAC41qMD3KzraVy6QE3F87gjf4I2py9BVc2+LPXyBYQQQixTrd+LTSqV4pdffsHGjRuxYcMGJCcnIyMjAw4ODggMDMT48eMxduxYo4q/ePEiWrVqpVV4ly5dAADJyckGdZDu378PAHBycqpyPblcDrlcLnxfWFgIACgrK0NZ2dOjGRKJBFKpFEqlEirV/z6+1e0VrwkllUohkUgqbVfvV019nxqNU5dde8KmWxCeJP+Jy8cS0CYoGLIOL4D//964SqXSuCEwx3GQyWSVtldWe00yWVlZobCwUO9M3NlT4JZ9qnVkjMfTI2PPffw5HnbpXm2mevXqAUCtZCrfrt
fzVKG9rKwMR48eRXh4OOzs7CzieappJgBCrn79+gn1sJ5JnSshIQG9e/fW+KOOMllWJrWDBw8iPDwcVlZWoshkZWUFuVyO+Ph4IZcYMqlUKpSWlgrvhfXq1TN7poq5qlKjK2mPGzcO48aNq8kutNy7dw+urq5a7eq2u3fvGrS/L774AlKpFMOHD69yvcWLFyM6Olqr/dChQ8IpKg8PD3Ts2BGXL19GRkaGsI6fnx/8/f2RlJSE7Oz/DUAOCAiAp6cnTpw4gaKi/40V6tatG1xcXHDo0CGNF1VYWBhsbW0RFxenUUNERASKW7TEvTv3ce9+NnD/IGQyGSIjI5GTk4PExERhXXt7e4SHhyMzMxPJyclCu7OzM7p3746UlBTcuHFDaDd1pt9//11npiNHjmhk6v/RZyhbvRJ4mCO0yRs4oOHMecj1a4PE48e1Mt2+fdssmQx5nkpKSpCQ8L/LFCQkJFjk81STTGpiyiSTyaBQKPDw4UOcP3+eMllwpr59+0KpVGpM3GE9U2RkJB4+fKiRSwyZyr9HxMfHW0Qm9cEPfRh9Je3a4uPjAz8/P60nITU1FT4+Pli5ciWmT5+u1762bt2KMWPGYPbs2fjiiy+qXFfXESR3d3fk5OQIR7PM2UMvKyvD/v370adPH+GvJnUPndW/4nmlEiV/JuHKiWNoFxwKqw4voJ6NDdOZ1O1lZWWIj49Hnz59RHcEKT4+HhEREaI7ghQfH4/+/fuL5miLGDOpxcXFabwXsp5JfQTpwIEDQi4xZFIfQVK/F1rCEaTCwkI4OTnV3im2ffv2Yd26dfj222/h5uamtfzu3buYMmUKJk2ahAEDBhi0b1tbW42OilppaamwXB8nT55EVFQU+vXrh88++6za9a2trWFtba3VbmVlJfwSqkmlUp0DztQvFH3bK+63qnb19X4q1iORSHSO86qsvbLazZJJJkO9F15E1v1sdHrhRWEdljNVbBdjJjWxZjIkK2Wq+0zqD25d782sZgIg1K3vezwrmcr/Qa9+LHNmqqx+XYya5v/dd9/h1q1bOjtHAODm5oa0tDR89913Bu/b1dUV9+7d02pXt1X2mOVdunQJgwYNQtu2bREbG1vpD4slMpkMYWFhoshSHuViC+VihxgzAeLMxSuV4K5eQriMB3f1EvhyR1dYx/LzZVTFly5dwsCBA6tcJzAwEPv27TN43wEBAUhISEBhYaHG4a9z584Jy6ty69Yt9O/fHy4uLoiLi9N7dhUL9D16xhrKxRbKxQ4xZgLElav8TGUJADnEd19KVp8vo44g5ebmwsXFpcp1nJyckJOTU+U6ugwfPhxKpRJr1qwR2uRyOWJiYhAYGCjMYMvIyMD169c1tr1//z769u0LiUSCgwcPwtnZ2eDHt1QKhQJxcXFM3s+mKpSLLZSLHWLMBFh+Lp7n8fjxY/2+Du+HPPoDqCq5L+Xjw/v13peFDScWWPrzVRWjjiA5OztrjDTX5caNG3B0dDR434GBgRgxYgTmzp2LBw8ewNfXFxs2bEB6ejrWrl0rrDdu3DgcP35c40XRv39/pKamYvbs2Th16pTGlbebNm2KPn36GFwPIYQQoi/1rZeqo9c14RbO1euacABdF642GNVBCg4Oxu7du3H58mW0b99ea/mlS5ewd+9evPzyy0YVtXHjRsyfPx+bNm1CXl4e2rdvj3379iE4uOqrRl+6dAkA8OWXX2otCwkJoQ4SIYQQi9CjkT2a22hPDFKTcBzcbazRo5E9TlrALaWeRUZ1kObMmYPdu3ejZ8+emDVrFvr06YPnnnsOd+7cwaFDh7B8+XJIJBLMnTvXqKJsbGywdOlSLF26tNJ1jh07ptVmqYcYCSGEPBtsbW2RlZVV7XrSM8eBb6u/HdeemLVQdg/R63GJaRl9HaTdu3dj/PjxWjeB5XkeDRo0wMaNGzFkyBBT1GgWlnKrETWe56FQKCCTyYQp/2JAudhCudjAK5VQXrkIRfYDyJxdIG3XERyD98LSxdKfK31vvRTUyB77O7Wudr0Bf/6t1xEkSz3FZmnPV63fagQAhg0bhqCgIKxfvx7nz59HQUEBGjVqhC5dumD8+PGiGiBtKUpKSmBvb2/uMkyOcrGFclm2ivdvVEJ8s6LE8Fydzi/Cv6Xyau9LeVoEp9dYfb4s7kralsLSjiCVlZUhLi4OERERBl3oytJRLrZQrrrH8zyKi4v1WznxBLBkodZ9DgUfLAS6VT2WU83Ozs4i/uKvyJKfK4Cer4os7fmqkyNIhBBCat/jx4/1+uvb1LOiioqKRHUdubrCcZz+p7p6D4DC2kbjiB8AcM5NUe/tGaI54scq6iARQogF0/dohKlnRek7XZ3UjCwoDNLuwXiS/AcuHj2CjuG9UC+gs2jGjLGMOkgMYfFS7fqgXGyhXHXLzs5Or/WaWet3+kLf9fR9XHOw1OfKWJxUCkn7F/Dwfg4k7V8QXeeI1eeLxiBVwtLGIBFibrxSCdWVZPC5OeAcnSBpFyC6N3JLpPeYlivJwEfvVb/eohVAu4BqV7PUMS2E1ASNQRIhlUqFnJwcODk56bwLMqsoFxsqzowCxDUzypKfL33HtPAvdkWJk4vGc6S1L+emsH2xK9MdW0t+rmqCclketqp9himVSiQmJkIpors8A5TLHAy6V5QJ7xdlyQerLfn50hcnlaLeO1UfQar39gymO0eAOJ4rXSiX5THqCNLdu3fh5uZm6loIIXXAkMG3ppwZZakXshMTWVAYsGAJzYoixASM6iB5eXlhwIABmDRpEiIiIpg7bEbIs8yQIzmmnBllyUeQxIRmRRFiGkb1bLp27YrffvsNgwcPhoeHBz7++GOkp6ebuDRSHsdxsLe3F92gScpl2Uw9M8pSieX5UuOkUkg7dEJxp66Qdugkqs6R2J4rNcpleYyexXbz5k389NNP2LRpEx48eACJRILevXtj0qRJGDx4MLPT+tRoFhsRK33vFQWY9n5RdIqNEGJuhny213iav0KhwK+//or//ve/iI+PB8/zcHJywoQJExAVFYVWrVrVZPdmY2kdJJVKhczMTLi7u4vqlCblqnsG3QpBqQQmvQo8zK58HSdnYM1WoJqjFJY8bdySny9jiTETQLlYY2m5DPlsr3G1MpkMw4YNw/79+5Geno4FCxZAIpFg2bJlaN26NcLCwrBz504af1BDSqUSycnJTM4EqArlqnvqaeN6fTk4wHrKzCr3Z/3OTNR3cKh2X5baOQIs+/kylhgzAZSLNSznMll3TqVS4c8//8T58+eRnZ0Nnufh7u6O06dP45VXXkGHDh2QkpJiqocjhNQRWVAYrBcsAefkotHOOTeF9YIlNDOKECJKNR4olJqaiv/+97/YsGED7t+/LxxReuuttxAWFob79+9j5cqVWLlyJf7zn//g8OHDpqibEFKHaGYUIeRZY1QHqaysDLt378ZPP/2E48ePQ6VSwdvbG59//jlef/11uLj87y/NZs2a4YsvvkBhYSE2btxossKfNRzHwdnZ2aJPUxiDcrFDPTNKJVeKcmaU6J4vEWYCKBdrWM5l1
CBtZ2dn5ObmQiqV4qWXXsKbb76Jvn37VrnNkiVLMG/ePKhU1V1OzjJY2iBtQgghhNRMrQ/StrOzQ3R0NG7fvo3du3dX2zkCgLfffhtpaWnGPBzB04Fu169fZ3KgW1UoF1soFzvEmAmgXKxhOZdRHaT09HR89NFHcHV11XsbBwcHeHp6GvNwBE8Hwd+4cYOZI3D6olxsoVzsEGMmgHKxhuVcRnWQWDyXSAghhBCiL6MGab/xxhvVriORSODg4AA/Pz8MHDgQzz33nDEPRQghhBBS54zqIK1fv144iqRrjDfHcRrt7777Lj7++GN89NFHRpZJJBIJPDw8LOJKpKZEudhCudghxkwA5WINy7mMmsWWlpaG6dOnIykpCdOmTUOPHj3QtGlTZGVl4fTp0/j666/RpUsXfPjhh7h06RIWLVqEzMxMbN26FaNGjaqNHCZHs9gIIYQQcan1WWw7duzAuXPnkJycjA8++ABBQUFo1aoVgoKC8MEHH+DChQs4e/YsEhISMHHiRJw+fRoNGjTA999/b1Qg8nQmwMWLF5mcCVAVysUWysUOMWYCKBdrWM5lVAdp7dq1GDlyJJo2bapzebNmzTBixAj89NNPAIDnnnsOAwcOxKVLl4yv9BmnUqmQkZHB5EyAqlAutlAudogxE0C5WMNyLqM6SP/++y+sra2rXMfGxgb//vuv8L2HhwdKS0uNeThCCCGEkDplVAfpueeewy+//FJph6e0tBS//PKLxsy1Bw8eoHHjxsZVSQghhBBSh4zqIEVFReHWrVvo2bMn9u7di4cPHwIAHj58iL1796Jnz55ITU3VuBzAyZMn0aFDB9NU/QySSCTw8/NjciZAVSgXWygXO8SYCaBcrGE5l1Gz2JRKJV5//XVs3rxZmO4vkUiEc4w8z+PVV1/Fxo0bIZFIkJWVhSVLlqB///7o169ftfuXy+X4+OOPsWnTJuTl5aF9+/ZYtGgR+vTpU+V2N27cwA8//IBz587hwoULkMvlSEtLg5eXl6ERaRYbIYQQIjK1PotNKpVi48aNOHz4MMaNG4eAgAB4eXkhICAA48ePR3x8PDZv3iz0GJs2bYqVK1fq1TkCgAkTJmDFihUYM2YMVq1aBalUioiICJw6darK7RITE/H111+jqKgIrVu3NiaaxVIoFDhz5gwUCoW5SzEpysUWysUOMWYCKBdrWM5l1IUiT5w4AQcHB4SHhyM8PNykBSUlJWH79u1YunQpZs2aBQAYN24c2rZti9mzZ+PMmTOVbjto0CDk5+fD3t4ey5YtQ3JysklrMyee55Gdna3zwpwso1xsoVzsEGMmgHKxhuVcRh1BCgsLw5o1a0xdCwAgNjYWUqkUkydPFtpsbGwQFRWFxMREZGZmVrqto6Mj7O3ta6UuQgghhDw7jDqC5OLiAhsbG1PXAgC4ePEiWrVqpXVusEuXLgCA5ORkuLu7m/xx5XI55HK58H1hYSEAoKysDGVlZQCejrOSSqVQKpUa13RQtysUCo1eslQqhUQiqbRdvV81mezp01HxUKRMJhO2L7+NlZUVVCqVxgW4OI6DTCartL2y2s2RqXwe9b9iyKRQKDRyiSWTOo+aWDKVz6VSqTT2Q5ksK1N5Ysqkfo8on0ssmcq/F1pCpoq5qmJUB6lPnz44duwYeJ4XBmmbyr179+Dq6qrVrm67e/euSR9PbfHixYiOjtZqP3ToEOzs7AA8vZZTx44dcfnyZWRkZAjr+Pn5wd/fH0lJScjOzhbaAwIC4OnpiRMnTqCoqEho79atG1xcXHDo0CGNF1VYWBhsbW0RFxenUUNERARKSkoAAPHx8QCevigjIyORk5ODxMREYV17e3uEh4cjMzNT4xSjs7MzunfvjpSUFNy4cUNoN3emhIQEIZfYMgFAQkKC6DKp35zElEkmkyEgIAB5eXk4d+4cZbLgTAMGDECLFi2E90IxZIqMjEReXh6A/73HiyFT+feI+Ph4i8ikPvihD6Nmsd29exfdunVD37598cUXX8DR0dHQXVTKx8cHfn5+Wk9CamoqfHx8sHLlSkyfPr3a/Sxbtgzvv/++3rPYdB1Bcnd3R05OjnA0y1J76GL5K54yUSbKRJkoE2WqzUyFhYVwcnLSaxabUUeQXnvtNTRq1Ajr1q3D5s2b4e3tjaZNm2odTeI4DkeOHDFo37a2thodFTX1RSltbW2NKbla1tbWOq8ObmVlBSsrK402qVQKqVSqta76haJve8X9VtWuUChw8uRJBAcHa+xPIpHovL5EZe2V1W6OTMDT18iJEyc0crGeycrKCgqFQshVVe0sZQKgkUsmk4kiE/A019GjRxEcHKxzfcpkGZkA7ddgeaxmAp6eCjXkPZ6FTOrL/+j7Hl8XmSqrX+d+9F6znGPHjgn/l8vluH79Oq5fv661njGn31xdXXHnzh2t9nv37gEA3NzcDN6nGPA8j6KiIiZnAlSFcrGFcrFDjJkAysUalnMZNYtNpVLp9WXM3XsDAgJw8+ZNrfOE6nPoAQEBxpRMCCGEEKI3i7v29/Dhw6FUKjUuIyCXyxETE4PAwEBhBltGRobOo1aEEEIIITVl1Cm28h49eoSbN2/i8ePHCAoKqnFBgYGBGDFiBObOnYsHDx7A19cXGzZsQHp6OtauXSusN27cOBw/flzjsF1BQQG++eYbAMDp06cBAN9++y0aNWqERo0aYcqUKTWuz1ykUim6deum8zwsyygXWygXO8SYCaBcrGE5l1Gz2AAgPT0d06ZNQ1xcHFQqFTiOE0axnz59GpMmTcL333+P0NBQg/ddWlqK+fPnY/PmzcK92D799FONW5WEhoZqdZDS09Ph7e2tc5+enp5IT0/Xuwa6FxshhBAiLrV+L7aMjAx07doVcXFxGDx4MLp166bRUQkMDEROTg62bdtmzO5hY2ODpUuX4t69eygtLUVSUpLWfdzU12Eqz8vLCzzP6/wypHNkicrKyvD7778bdJErFlAutlAudogxE0C5WMNyLqM6SAsWLEBeXh6OHz+O2NhY9OnTR2O5TCZDUFCQcJqLmAaLN/vTB+ViC+VihxgzAZSLNazmMqqDdPDgQQwdOhTdu3evdB1PT0+d0/UJIYQQQiydUR2k3Nzcaq9OzfO8zgs+EkIIIYRYOqM6SE2bNkVKSkqV61y5cgUeHh5GFUW0yWQyhIWFVXqFUFZRLrZQLnaIMRNAuVjDci6jOkh9+vTBvn37cPnyZZ3LT548iaNHjyIiIqJGxRFNtXWbFXOjXGyhXOwQYyaAcrGG1VxGdZA++ugj2NraIjg4GJ999hn++ecfAMD+/fsxf/589O/fH05OTnj//fdNWuyzTKFQIC4ujtnBbpWhXGyhXOwQYyaAcrGG5VxGHfPy8vLCwYMHMXr0aMyfPx8cx4HneQwcOBA8z8PDwwOxsbFwdXU1db2EEEIIIbXO6JOCgYGBSElJwW+//YZz584hNzcXDg4OCAwMxODBg1GvXj1T1kkIIYQQUmdqNGpKJpNh6NChGDp0qKnqIYQQQggxO6NvNSJ2lnarEZ7noVAoIJPJwHGcucsx
GcrFFsrFDjFmAigXaywtlyGf7UYfQXry5Al++eUXnD9/Hvn5+VAqlVrrcByncYNZUjMlJSWwt7c3dxkmR7nYQrnYIcZMAOViDau5jOog3b59G3369MGtW7e07odWHnWQTEehUCAhIQERERGwsrIydzkmQ7nYQrnYIcZMAOViDcu5jOogzZgxA//88w/Gjh2LN954A82bN2fyIlCEEEIIIboY1as5evQoevXqhQ0bNpi6HkIIIYQQszPqQpEqlQodO3Y0dS2kGmI9Ske52EK52CHGTADlYg2ruYyaxdanTx/Y2Njgt99+q42aLIKlzWIjhBBCSM0Y8tlu1BGkJUuW4OjRo4iNjTWqQGI4lUqFBw8eQKVSmbsUk6JcbKFc7BBjJoBysYblXEYd9/r9998RFhaGUaNGISQkBC+88ILOnhjHcZg/f36NiySAUqlEYmIiIiIiIJEY1a+1SJSLLZSLHWLMxCuVKEv+A6lHj6BheC/UC+gMTio1d1kmIcbnC2A7l1EdpIULFwr/P3bsGI4dO6ZzPeogEUIIMQXFyQQ8+W4F+JwHaAdAcfBXKJ1cUO+d9yALCjN3eUSEjOogJSQkmLoOQgghRCfFyQTIoz/QaudzHjxtX7CEOknE5IzqIIWEhJi6DlINjuNgb29vEZdqNyXKxRbKxQ5Lz6RSqZCTk6PHikrYfrMUHABdSXgAJd8uQ0lLf0BS/ek2JycnizzVY+nPl7FYzkX3YqsEzWIjhJDa8+DBAzRt2rTa9YIa2WN/p9bVrjfgz79xMr+o2vWysrLg4uKiV41EfGp9Fhvw9PLhK1euRJcuXeDg4KBxnYPk5GS8/fbbuHnzprG7JxWoVCrcvn2byZkAVaFcbKFc7BBLpmbW+t2eQt/1LJVYnq+KWM5l1Cm2kpIS9O3bF2fOnIGTkxMcHBzw+PFjYbm3tzdiYmLg6OiIRYsWmazYZ5lSqURycjLc3Nws8vCwsSgXWygXOyw9k5OTE7KysqpdT3LtCrBoXrXrfb99J759vp1ej2uJLP35MhbLuYzqIH3++ec4ffo0lixZgvfffx/R0dH49NNPheUNGzZESEgIDh48SB0kQgghWiQSiV6nuvgmoShxcgGf86DSdTjnpmgSFCqaKf/EMhjVnduxYwfCwsIwe/ZscBync/BVixYtkJGRUeMCCSGEPLs4qRT13nmvynXqvT2DOkfE5IzqIGVkZKBz585VrmNvb4+CggKjiiLaOI6Ds7MzkzMBqkK52EK52CGmTLKgMFgvWALOSfOIE+fcFNYimeIvpuerPJZzGXWKzd7eHg8eVH64EwBu3boFZ2dno4oi2mQyGbp3727uMkyOcrGFcrFDbJlkQWGQdg+G6koy+NwccI5OkLQLEM2RI7E9X2os5zLqCFLXrl3x22+/IT8/X+fyzMxMxMXFITg4uCa1kXKUSiWuX78OpVJp7lJMinKxhXKxQ4yZOKkUaBeAf9w8ARF1jgBxPl8A27mM6iC9//77yMvLQ69evXD69GkoFAoAQHFxMY4cOYJ+/fpBoVDgvfeqPm9cGblcjjlz5sDNzQ22trYIDAxEfHy8XtveuXMHI0eORKNGjeDg4IDBgwcjNTXVqDosiUqlwo0bN5icKlkVysUWysUOMWYCKBdrWM5l1Cm24OBgfPvtt5g2bZrGUSJ7e3sAgFQqxffff49OnToZVdSECRMQGxuL6dOno2XLlli/fj0iIiKQkJCAnj17Vrrdo0ePEBYWhoKCAsybNw9WVlZYuXIlQkJCkJycjCZNmhhVDyGEEEKeLUZ1kADgP//5D0JDQ/HDDz/g3LlzyM3NhYODAwIDA/H222+jTZs2Ru03KSkJ27dvx9KlSzFr1iwAwLhx49C2bVvMnj0bZ86cqXTb77//HikpKUhKSsKLL74IABgwYADatm2L5cuX4/PPPzeqJkIIIYQ8W4zuIAFA69atsWrVKlPVAgCIjY2FVCrF5MmThTYbGxtERUVh3rx5yMzMhLu7e6Xbvvjii0LnCAD8/f3Rq1cv7Ny5k+kOkkQigYeHB3MX2qoO5WIL5WKHGDMBlIs1LOeqUQepNly8eBGtWrXSukdKly5dADy9jYmuDpJKpcLly5fxxhtvaC3r0qULDh06hKKiIuE0YEVyuRxyuVz4vrCwEABQVlaGsrIyAE+faKlUCqVSqXE+Vd2uUChQ/tZ2UqkUEomk0nb1ftXUt2tRj+kq3y6RSNC2bVuoVCrhsa2srKBSqTQGv3EcB5lMVml7ZbWbIxPw9Hkrn0sMmdTt6lxSqVQ0mdS5xJYJADp27AiVSqWxH8pkeZmsrKzQoUMHjXrEkInjOI33QjFkUmdR5+J53uyZKuaqisV1kO7duwdXV1etdnXb3bt3dW6Xm5sLuVxe7bZ+fn46t1+8eDGio6O12g8dOgQ7OzsAgIeHBzp27IjLly9rXATTz88P/v7+SEpKQnZ2ttAeEBAAT09PnDhxAkVF/7uJYrdu3eDi4oJDhw5pvKjCwsJga2uLuLg4jRoiIiLw+PFjHD9+XGiTyWSIjIxETk4OEhMThXZ7e3uEh4cjMzMTycnJQruzszO6d++OlJQU3LhxQ2g3Z6aSkhIkJCRQJsYycRyHyMhIPHz4UDSZZDIZ3Nzc0KxZMyQlJVEmC87Uv39/nDt3TuMxWc8UGRmJrKwsUT1Plvq+pz74oQ+OL9/NsgA+Pj7w8/PTehJSU1Ph4+ODlStXYvr06VrbZWZmwsPDA1988QVmz56tsWzdunWIiorCxYsXERAQoPNxdR1Bcnd3R05OjnA0y5w99LKyMuzfvx99+vSBldXTmzKK4WhLSUkJ4uPjhVxiyKRQKFBWVibksrOzE0UmAEKuiIgIoR7WM5XP1b9/f41TAZTJsjKpxcXFabwXsp7JysoKcrkcBw4cEHKJIZNKpUJpaanwXlivXj2zZyosLISTkxMKCgq0zlRVZHFHkGxtbTU6KmqlpaXC8sq2A2DUtgBgbW0Na2trrXb1h3Z5UqkUUh3X31C/UPRtr7jfqtrVVyGtWI9EItF5brey9spqN0em8u3lc4klU/n/iymTmlgzGZKVMtV9JvUHt673ZlYzARDq1vc9npVM5d/j1Y9lzkyV1a9zP3qvWUdcXV1x584drfZ79+4BANzc3HRu5+joCGtra2E9Q7bVRd37NORwXG0qKytDcXExCgsLDXqCLR3lYgvlYocYMwGUizWWlkv9ma7PyTOL6yAFBAQgISEBhYWFGoe/zp07JyzXRSKRoF27dvjjjz+0lp07dw4tWrSodIC2Lurzl5XNmCOEEEIIm4qKitCwYcMq17G4MUjnzp1D165dNa6DJJfL0bZtWzRp0gRnz54F8PSGucXFxfD39xe2/eKLL/DBBx/g/Pnzws10b9y4gTZt2mDWrFlYsmSJ3nWoVCrcvXsX9vb2FnGTPfWYqMzMzGrPm7KEcrGFcrFDjJkAysUaS8vF8zyKiorg5uZW7aUHLK6DBAAjR47Enj17MGPGDPj6+mLDhg1ISkrCkSNHhCt3h4aG4vjx4xqHyYqKitCxY0cUFRVh1qx
ZsLKywooVK6BUKpGcnMz0zXMLCwvRsGFDvQaWsYRysYVysUOMmQDKxRqWc1ncKTYA2LhxI+bPn49NmzYhLy8P7du3x759+6q9+a29vT2OHTuGGTNmYNGiRVCpVAgNDcXKlSuZ7hwRQgghpG5ZZAfJxsYGS5cuxdKlSytd59ixYzrbmzdvjl27dtVSZYQQQgh5FrB37e9nlLW1NRYsWKDzUgQso1xsoVzsEGMmgHKxhuVcFjkGiRBCCCHEnOgIEiGEEEJIBdRBIoQQQgipgDpIhBBCCCEVUAeJEEIIIaQC6iARQggxOy8vL0yYMMHcZRAioA6Shbl16xbefPNNtGjRAjY2NnBwcECPHj2watUqzJkzBxzHVfsVGhpq7hhaxJirqkwlJSXCekqlEjExMQgNDRVuquzl5YXXX39d570DjbF+/foqf3bqW/SYikqlwvr16zFo0CC4u7ujfv36aNu2LRYtWoTS0tJKt/v777/BcRxsbGyQn5+vc53Q0FC0bdvWpPWmpaVhypQpaNWqFezs7GBnZ4fnn38e77zzDi5fvqxzm9mzZ4PjOIwaNcqktZgSK7mqen1+8MEHdVZHbdP397B8m0QigZubG/r27at1fT8vLy9wHIfevXvrfLyffvpJ2I+p3kuepVzVscgLRT6rfv/9d4wYMQLW1tYYN24c2rZtiydPnuDUqVN4//330bNnT2zatElY/9GjR/jPf/6DoUOH4uWXXxbamzZtao7yKyXGXNVlunr1KtasWYOSkhK8/PLLOHDgAIKDgzFv3jw4OjoiPT0dO3fuxIYNG5CRkYHmzZubpK5PPvkE3t7eWu2+vr4m2b9acXExXn/9dXTt2hVvvfUWXFxckJiYiAULFuDIkSM4evSoznsYbt68Gc2aNUNeXh5iY2MxceJEk9aly759+zBq1CjIZDKMGTMGHTp0gEQiwfXr1/Hzzz9j9erVSEtLg6enp7ANz/PYtm0bvLy88Ntvv6GoqMigm13XBRZz6Xp9mrozbAn0+T3s06cPxo0bB57nkZaWhu+//x7h4eH4/fffMWDAAGE9GxsbJCQk4P79+2jWrJnG/rZs2QIbG5sq/ygxJbHmqhRPLEJqairfoEED3t/fn797967W8pSUFP6rr77SaMvOzuYB8AsWLKijKg0nxlyGZHrnnXd4APzKlSu11lMoFPzSpUv5zMzMGtcUExPDA+DPnz9f433pQy6X86dPn9Zqj46O5gHw8fHxWstUKhXv5eXFv/fee/zQoUP50NBQnfsOCQnh27RpY5I6//nnH75+/fp869atdT5XZWVl/KpVq/iMjAyN9qNHj/IA+KNHj/JWVlb8+vXrTVKPqbCWS5/Xp6enJz9+/Pg6qae26Pt7CIB/5513NNouX77MA+D79u0rtHl6evK9evXiHRwctN4nMzMzeYlEwg8bNqzWf/fFmqs6dIrNQnz55Zd49OgR1q5dC1dXV63lvr6+mDZtmhkqqxkx5tI307///osff/wRffr0wfTp07XWk0qlmDVrlsmOHulDpVJh1apVaNeuHWxsbODs7Iz+/ftrHMZWKBT49NNP4ePjI5wOnDdvHuRyubBOvXr10L17d639Dx06FMDTU2kVnT59Gunp6Rg9ejRGjx6NEydO4N9//62FlP/z5Zdf4vHjx4iJidH5XMlkMkydOhXu7u4a7Vu2bMHzzz+PsLAw9O7dG1u2bKnVOg0l1lwVpaamYsSIEXB0dISdnR26du2K33//XWOdY8eOgeM47NixA/PmzUOzZs1Qv359DBo0CJmZmRrrpqSkYNiwYWjWrBlsbGzQvHlzjB49GgUFBXUZS6d27drByckJaWlpGu02NjZ4+eWXsXXrVo32bdu2oXHjxujXr19dlmkwlnPRKTYL8dtvv6FFixY6P3RYJsZc+mbav38/FAoFxo4dW0eVAQUFBcjJydFo4zgOTZo0AQBERUVh/fr1GDBgACZOnAiFQoGTJ0/i7Nmz6Ny5MwBg4sSJ2LBhA4YPH46ZM2fi3LlzWLx4Mf7++2/s2bOnyse/f/8+AMDJyUlr2ZYtW+Dj44MXX3wRbdu2hZ2dHbZt24b333/fFNF12rdvH3x9fREYGKj3NnK5HLt378bMmTMBAK+88gpef/11nacCzIXVXLpen7peKwCQlZWF7t27o7i4GFOnTkWTJk2wYcMGDBo0CLGxsUJnXO2zzz4Dx3GYM2cOHjx4gK+++gq9e/dGcnIybG1t8eTJE/Tr1w9yuRzvvvsumjVrhjt37mDfvn3Iz89Hw4YNazVn+d9DXfLy8pCXl6fzdPirr76Kvn374tatW/Dx8QEAbN26FcOHD4eVlZXJ6q6OWHNVymzHroigoKCAB8APHjzYoO0s/VSUGHMZkmnGjBk8AP7ixYu1Xpf6ELiuL2tra57n/3d6ZerUqVrbq1Qqnud5Pjk5mQfAT5w4UWP5rFmzhFMzVenduzfv4ODA5+XlabQ/efKEb9KkCf/hhx8Kba+++irfoUMHrX2Y6hSb+rkaMmSI1rK8vDw+Oztb+CouLhaWxcbG8gD4lJQUnud5vrCwkLexsdF5mtQcWMxV1etTreIptunTp/MA+JMnTwptRUVFvLe3N+/l5cUrlUqe53k+ISGBB8A/99xzfGFhobDuzp07eQD8qlWreJ7n+YsXL/IA+F27dpklp/r3kOefnoqKioris7Oz+QcPHvDnzp3je/XqxQPgly9fLqzn6enJR0ZG8gqFgm/WrBn/6aef8jzP89euXeMB8MePH6+T0+tizVUdOsVmAQoLCwHA4gaB1pQYcxmSyRz5v/vuO8THx2t87d+/HwCwe/ducByHBQsWaG2nHlAdFxcHAHjvvfc0lquPOlQ8vVHe559/jsOHD2PJkiVo1KiRxrL9+/fj4cOHeOWVV4S2V155BZcuXcLVq1cND6oH9c+/QYMGWstCQ0Ph7OwsfH333XfCsi1btqBz587CX7z29vaIjIy0mNNRLOfS9fqsTFxcHLp06YKePXsKbQ0aNMDkyZORnp6Oa9euaaw/btw4jd+14cOHw9XVVXhNq48QHTx4EMXFxaaMpaWq30O1tWvXwtnZGS4uLggMDMTp06fx3nvvVXo6fuTIkdi2bRuAp8+lu7s7goKCajVHRWLNVRk6xWYBHBwcAABFRUVmrsS0xJjLkEzmyN+lSxfhVFlFt27dgpubGxwdHSvd/vbt25BIJFqHw5s1a4ZGjRrh9u3bOrfbsWMHPvroI0RFReE///mP1vLNmzfD29sb1tbW+OeffwAAPj4+sLOzw5YtW/D555/rG1Fv6g/LR48eaS378ccfUVRUhKysLLz22mtCe35+PuLi4jBlyhShTgDo0aMHdu/ejZs3b6JVq1Ymr9UQLOeq6vVZ0e3bt3WeQmzdurWwvPwMuJYtW2qsx3EcfH19kZ6eDgDw9vbGe++9hxUrVmDLli0ICgrCoEGD8Nprr5n09BqgX87BgwdjypQp4DgO9vb2aNOmDerXr1/p+q+++iq+/vprXLp0CVu3bsXo0aN1zhStTWLNVRnqIFkABwcHuLm54a
+//jJ3KSYlxlyGZPL39wcAXLlyBQEBAbVcmWkZ8gYVHx+PcePGITIyEj/88IPW8sLCQvz2228oLS3V+hADno45UI8fMaWGDRvC1dVV53Ol/uBVf3iq7dq1C3K5HMuXL8fy5cu1ttuyZQuio6NNWqehxJqrLixfvhwTJkzAr7/+ikOHDmHq1KlYvHgxzp49W6eTJQCgefPmlV4HSJfAwED4+Phg+vTpSEtLw6uvvlqL1RlPTLnoFJuFGDhwIG7duoXExERzl2JSYsylb6YBAwZAKpVi8+bNdVRZ1Xx8fHD37l3k5uZWuo6npydUKhVSUlI02rOyspCfn69xTR0AOHfuHIYOHYrOnTtj586dkMm0/+b6+eefUVpaitWrV2PXrl0aX4sWLcLt27dx+vRp04SsIDIyEv/88w+SkpL0Wn/Lli1o27atVp27du1C7969tWbcmItYc5Xn6emJGzduaLVfv35dWF5exdcsz/P4559/4OXlpdHerl07fPTRRzhx4gROnjyJO3fu6OzYW6JXXnkFx44dQ+vWrZn7o6sqlpqLOkgWYvbs2ahfvz4mTpyIrKwsreW3bt3CqlWrzFBZzYgxl76Z3N3dMWnSJBw6dAjffPON1noqlQrLly+v9anuasOGDQPP8zqPFPA8DwCIiIgAAHz11Vcay1esWAHg6Qez2t9//43IyEh4eXlh3759sLW11fm4mzdvRosWLfDWW29h+PDhGl+zZs1CgwYNam0czOzZs2FnZ4c33nhD53Olzg0AmZmZOHHiBEaOHKlV5/Dhw/H666/jn3/+wblz52qlVkOINVd5ERERSEpK0vhD5PHjx1izZg28vLzw/PPPa6y/ceNGjdPZsbGxuHfvnnBxwsLCQigUCo1t2rVrB4lEonEJC0s2ceJELFiwQOdRQJZZai46xWYhfHx8sHXrVowaNQqtW7fWuDrzmTNnsGvXLibvUyTGXIZkWr58OW7duoWpU6fi559/xsCBA9G4cWNkZGRg165duH79OkaPHm2y2vbv3y/8hV1e9+7dERYWhrFjx+Lrr79GSkoK+vfvD5VKhZMnTyIsLAxTpkxBhw4dMH78eKxZswb5+fkICQlBUlISNmzYgCFDhiAsLAzA03FV/fr1Q15eHt5//32twds+Pj7o1q0b7t69i4SEBEydOlVnvdbW1ujXrx927dqFr7/+2uRTe1u2bImtW7filVdegZ+fn3DFaf7/r/K7detWSCQSNG/eHFu3bgXP8xg0aJDOfUVEREAmk2HLli0GTa+vDWLNVd4HH3yAbdu2YcCAAZg6dSocHR2xYcMGpKWlYffu3ZBINP++d3R0RM+ePfH6668jKysLX331FXx9fTFp0iQAwNGjRzFlyhSMGDECrVq1gkKhwKZNmyCVSjFs2DCT1l7V72GLFi2M3q+npycWLlxYg8pqRqy5KmW2+XNEp5s3b/KTJk3ivby8+Hr16vH29vZ8jx49+G+++YYvLS3VWNeSp8NXJMZc+mZSKBT8f//7Xz4oKIhv2LAhb2VlxXt6evKvv/66yS4BUNU0XAB8TEyMUMvSpUt5f39/vl69eryzszM/YMAA/s8//xT2VVZWxkdHR/Pe3t68lZUV7+7uzs+dO1cjU1paWpWPp56uvXz5ch4Af+TIkUprX79+PQ+A//XXX3meN+2VtNX++ecf/j//+Q/v6+vL29jY8La2try/vz//1ltv8cnJyTzP83y7du14Dw+PKvcTGhrKu7i48GVlZSatz1is5DL2Stq3bt3ihw8fzjdq1Ii3sbHhu3Tpwu/bt09jHfU0/23btvFz587lXVxceFtbWz4yMpK/ffu2sF5qair/xhtv8D4+PryNjQ3v6OjIh4WF8YcPHzZ5zup+D6HjitO6qKfD6/OY5prmz3Ku6nA8X+5YLCGEEMKQY8eOISwsDLt27cLw4cPNXQ4RERqDRAghhBBSAXWQCCGEEEIqoA4SIYQQQkgFNAaJEEIIIaQCOoJECCGEEFIBdZCeEceOHQPHcTh27Ji5SyHErBYuXGj0bU02bdoEf39/WFlZad2QlxB91OT1R+oWdZAIIUQP169fx4QJE+Dj44OffvoJa9asMXdJBvv++++xfv16c5dBROLMmTNYuHAh8vPzjdre0l+PdCVtQgjRw7Fjx6BSqbBq1Sr4+vqauxyjfP/993BycmLu6vXEMp05cwbR0dGYMGGCUUdULf31SEeQLJxKpUJpaam5yyDkmffgwQMAoFNrhDwjqINUR9Tnna9fv46RI0fCwcEBTZo0wbRp0zQ6QBzHYcqUKdiyZQvatGkDa2trHDhwAABw584dvPHGG2jatCmsra3Rpk0brFu3Tuux/v33XwwZMgT169eHi4sLZsyYYbKbMapz3Lx5E6+99hoaNmwIZ2dnzJ8/HzzPIzMzE4MHD4aDgwOaNWumdfNBuVyOBQsWwNfXF9bW1nB3d8fs2bO16ouJiUF4eDhcXFxgbW2N559/HqtXr9aq548//kC/fv3g5OQEW1tbeHt744033jBJVsK+U6dO4cUXX4SNjQ18fHzw448/6lxv8+bN6NSpE2xtbeHo6IjRo0cjMzNTWO7l5YUFCxYAAJydncFxXK3fO+r27dt4++234efnB1tbWzRp0gQjRoxAenq6xnqVjWlZv349OI4T1vfy8sLVq1dx/PhxcBwHjuMQGhoqrJ+amooRI0bA0dERdnZ26Nq1q9Y99ohhqnv9hYSEoEOHDjq39fPzQ79+/QAA6enp4DgOy5Ytw5o1a+Dj4wNra2u8+OKLOH/+fK3n0GXhwoV4//33AQDe3t7Cayo9PR0KhQKffvqpUKeXlxfmzZun8T5f3evREtAptjo2cuRIeHl5YfHixTh79iy+/vpr5OXlYePGjcI6R48exc6dOzFlyhQ4OTnBy8sLWVlZ6Nq1q9CBcnZ2xv79+xEVFYXCwkJMnz4dAFBSUoJevXohIyMDU6dOhZubGzZt2oSjR4+aNIf6Rq1LlizB77//jkWLFsHR0RE//vgjwsPD8cUXX2DLli2YNWsWXnzxRQQHB0OlUmHQoEE4deoUJk+ejNatW+PKlStYuXIlbt68iV9++UXY/+rVq9GmTRsMGjQIMpkMv/32G95++22oVCq88847AJ7+Rd+3b184Ozvjgw8+QKNGjZCeno6ff/7ZpFkJm65cuSK8PhYuXAiFQoEFCxagadOmGut99tlnmD9/PkaOHImJEyciOzsb33zzDYKDg3Hx4kU0atQIX331FTZu3Ig9e/Zg9erVaNCgAdq3b1+r9Z8/fx5nzpzB6NGj0bx5c6Snp2P16tUIDQ3FtWvXYGdnZ9D+vvrqK7z77rto0KABPvzwQwAQfhZZWVno3r07iouLMXXqVDRp0gQbNmzAoEGDEBsbi6FDh5o8n9jp8/obO3YsJk2ahL/++gtt27YV2s+fP4+bN2/io48+0tjn1q1bUVRUhDfffBMcx+HLL7/Eyy+/jNTUVJPf6Lk6L7/8Mm7evIlt27Zh5cqVcHJyAvD0D4iJEydiw4YNGD58OGbOnIlz585h8eLF+Pvvv
7Fnzx4AVb8eLYbZ7gL3jFmwYAEPgB80aJBG+9tvv80D4C9dusTz/NOb/UkkEv7q1asa60VFRfGurq58Tk6ORvvo0aP5hg0b8sXFxTzP8/xXX33FA+B37twprPP48WPe19eXB8AnJCSYJMfkyZOFNoVCwTdv3pznOI5fsmSJ0J6Xl8fb2toKN6DctGkTL5FI+JMnT2rs84cffuAB8KdPnxba1HnK69evH9+iRQvh+z179pj9ZobEcg0ZMoS3sbHRuGHptWvXeKlUyqvf+tLT03mpVMp/9tlnGtteuXKFl8lkGu3q1352dnad1K/rdyAxMZEHwG/cuFGrrorUN/tMS0sT2tq0acOHhIRorTt9+nQegMbvZlFREe/t7c17eXnxSqWyZmGeQfq8/vLz83kbGxt+zpw5GttOnTqVr1+/Pv/o0SOe5/93c+gmTZrwubm5wnq//vorD4D/7bff6iCRtqVLl2q9xpKTk3kA/MSJEzXWnTVrFg+AP3r0qNBW2evRUtAptjqmPvqh9u677wIA4uLihLaQkBA8//zzwvc8z2P37t146aWXwPM8cnJyhK9+/fqhoKAAFy5cEPbj6uqqcdNGOzs7TJ482aQ5Jk6cKPxfKpWic+fO4HkeUVFRQnujRo3g5+eH1NRUAMCuXbvQunVr+Pv7a2QIDw8HACQkJAjb2traCv8vKChATk4OQkJCkJqaioKCAmH/ALBv3z6UlZWZNB9hm1KpxMGDBzFkyBB4eHgI7a1btxZOWwDAzz//DJVKhZEjR2q8Jps1a4aWLVtqvCbrWvnfgbKyMjx8+BC+vr5o1KiR8PtuKnFxcejSpQt69uwptDVo0ACTJ09Geno6rl27ZtLHEzt9X38NGzbE4MGDsW3bNvD/f81mpVKJHTt2CMMkyhs1ahQaN24sfB8UFAQAwnusJVB/lr333nsa7TNnzgQApk7bUgepjrVs2VLjex8fH0gkEo1xBd7e3hrrZGdnIz8/H2vWrIGzs7PG1+uvvw7gfwNIb9++DV9fX60xCX5+fibNUf6XHnj6i25jYyMcZi3fnpeXBwBISUnB1atXtTK0atVKIwMAnD59Gr1790b9+vXRqFEjODs7Y968eQAgdJBCQkIwbNgwREdHw8nJCYMHD0ZMTIzJxlsRdmVnZ6OkpETr9w3Q/F1ISUkBz/No2bKl1uvy77//1nhN1rWSkhJ8/PHHcHd3h7W1NZycnODs7Iz8/Hzhd8BUbt++rfM9onXr1sJyoj99X38AMG7cOGRkZODkyZMAgMOHDyMrKwtjx47V2rbi+666s6R+j7UEt2/fhkQi0Zrp2axZMzRq1Iip1xKNQTIzXYMry//lCDydyQYAr732GsaPH69zP7U9HqIiqVSqVxsA4S8jlUqFdu3aYcWKFTrXc3d3BwDcunULvXr1gr+/P1asWAF3d3fUq1cPcXFxWLlypfDz4DgOsbGxOHv2LH777TccPHgQb7zxBpYvX46zZ8+iQYMGpohKREylUoHjOOzfv1/n69ecr6F3330XMTExmD59Orp164aGDRuC4ziMHj1a+B0AdL+HAE+PRBDL169fPzRt2hSbN29GcHAwNm/ejGbNmqF3795a61b3HmtJxHAxTOog1bGUlBSNI0T//PMPVCoVvLy8Kt3G2dkZ9vb2UCqVOn9pyvP09MRff/0Fnuc1XqA3btyoce015ePjg0uXLqFXr15V/vL89ttvkMvl2Lt3r8ZfTJWd7ujatSu6du2Kzz77DFu3bsWYMWOwfft2jdOA5Nni7OwMW1tbpKSkaC0r/7vg4+MDnufh7e0tHMm0FLGxsRg/frzGTNDS0lKti/KpjyLk5+drXIJA11/qlf3eeXp66nyPuH79urCc6E/f1x/wtNPz6quvYv369fjiiy/wyy+/YNKkSZV2hiyJrteTp6cnVCoVUlJShCOQwNOJAPn5+RqvJUvvRNEptjr23XffaXz/zTffAAAGDBhQ6TZSqRTDhg3D7t278ddff2ktz87OFv4fERGBu3fvIjY2VmgrLi62iKv+jhw5Enfu3MFPP/2ktaykpASPHz8G8L+/ksr/VVRQUICYmBiNbfLy8rT+cgoICAAAOs32jJNKpejXrx9++eUXZGRkCO1///03Dh48KHz/8ssvQyqVIjo6Wuu1xPM8Hj58WGc1VySVSrVq+uabb7SODPn4+AAATpw4IbQ9fvwYGzZs0Npn/fr1dV71OCIiAklJSUhMTNTYx5o1a+Dl5aUxJpJUT9/Xn9rYsWORl5eHN998E48ePcJrr71Wl+UaTT1GqvxrKiIiAsDTWWrlqc8cREZGamxv7FW46wIdQapjaWlpGDRoEPr374/ExERs3rwZr776aqXXwlBbsmQJEhISEBgYiEmTJuH5559Hbm4uLly4gMOHDyM3NxcAMGnSJHz77bcYN24c/vzzT7i6umLTpk0GTwmuDWPHjsXOnTvx1ltvISEhAT169IBSqcT169exc+dOHDx4EJ07d0bfvn1Rr149vPTSS8Ibxk8//QQXFxfcu3dP2N+GDRvw/fffY+jQofDx8UFRURF++uknODg4CL+k5NkVHR2NAwcOICgoCG+//TYUCgW++eYbtGnTBpcvXwbwtHOxaNEizJ07F+np6RgyZAjs7e2RlpaGPXv2YPLkyZg1a5ZZ6h84cCA2bdqEhg0b4vnnn0diYiIOHz6MJk2aaKzXt29feHh4ICoqCu+//z6kUinWrVsHZ2dnjQ9nAOjUqRNWr16NRYsWwdfXFy4uLggPD8cHH3yAbdu2YcCAAZg6dSocHR2xYcMGpKWlYffu3ZBI6G9pQ+nz+lPr2LEj2rZtK0xkeeGFF8xUtWE6deoEAPjwww8xevRoWFlZ4aWXXsL48eOxZs0a5OfnIyQkBElJSdiwYQOGDBmCsLAwje11vR4thjmmzj2L1FNxr127xg8fPpy3t7fnGzduzE+ZMoUvKSkR1gPAv/POOzr3kZWVxb/zzju8u7s7b2VlxTdr1ozv1asXv2bNGo31bt++zQ8aNIi3s7PjnZyc+GnTpvEHDhww6TT/ilOdx48fz9evX19r/ZCQEL5NmzbC90+ePOG/+OILvk2bNry1tTXfuHFjvlOnTnx0dDRfUFAgrLd3716+ffv2vI2NDe/l5cV/8cUX/Lp16zSmlF64cIF/5ZVXeA8PD97a2pp3cXHhBw4cyP/xxx81ykjE4/jx43ynTp34evXq8S1atOB/+OEHndPid+/ezffs2ZOvX78+X79+fd7f359/5513+Bs3bgjr1PU0/7y8PP7111/nnZyc+AYNGvD9+vXjr1+/znt6egqXzlD7888/+cDAQL5evXq8h4cHv2LFCp3T/O/fv89HRkby9vb2PACNKda3bt3ihw8fzjdq1Ii3sbHhu3Tpwu/bt69OsoqVvq8/nuf5L7/8kgfAf/7551rL1NP8ly5dqrUMAL9gwYLaKF8vn376Kf/cc8/xEolEeL2VlZXx0dHRvLe3N29lZcW7u7vzc+fO5UtLSzW2rer1aAk4nrfA0V0itHDhQkRHRyM7O1trphchhJBn
26pVqzBjxgykp6drzVYj5kHHTQkhhBAz4nkea9euRUhICHWOLAiNQSKEEELM4PHjx9i7dy8SEhJw5coV/Prrr+YuiZRDHSRCCCHEDLKzs/Hqq6+iUaNGmDdvHgYNGmTukkg5NAaJEEIIIaQCGoNECCGEEFIBdZAIIYQQQiqgDhIhhBBCSAXUQSKEEEIIqYA6SIQQQgghFVAHiRALsH79enAch/Xr15u7FL0sXLgQHMfh2LFj5i7FoqWnp4PjOEyYMMEsjx8aGmrxd0yva+Z+Tgg7qINESB1g7U352LFj4DgOCxcuNHcpFo86IYbz8vL6v/buPSjK6o0D+PeFRZaryy6gkrbgDlqpYBBehpswTAxLBAaiMTkYTY2XQIuippwhm0wtRIkZaGpGvEylFZZhYzdALgIaZDkqEoiggaKCCmEIwvP7w9m3fXcX2EV+6dTzmWFGzzl73uecF5fHPee8wNPT816HwdiI+EGRjN0HFi9ejAULFmDKlCn3OhSzvPjii1i2bBn/WgTG2L8WJ0iM3QcmTpyIiRMn3uswzObq6sq/dJkx9q/GS2yMmVBeXo6YmBi4urrC1tYW3t7eWL9+PW7evGnUtrCwEKGhoXB3d4dcLoeHhwciIiJQWFgI4M7+Ii8vLwDArl27IAiC+KXbwzPcHiRBELBo0SK0tbUhKSkJrq6ucHJyQnR0NJqbmwEA9fX1iIuLg1KphJOTExISEtDR0WEU544dOxAbGwtPT0/I5XIolUpERkaitLRU0u6tt95CWFgYAGDDhg2SeFtaWsQ2w+1BKioqQlhYGCZOnAg7Ozv4+voiOzsbt2/flrTTX3ZsamrC4sWL4eLiAgcHB0REROC3334b+SbpWbFiBQRBQHNzM7KysjBjxgzY2dnhkUcewd69ewEA/f39ePPNN8Xx+/j44NChQyb76+npQWZmJmbNmgU7OzsoFApERkaisrJS0k4QBJSVlYl/1n2ZWkq1ZIwnT55EYmIi3N3dYWtrCy8vL6xbtw6dnZ0m21dWViI0NBQODg5QqVRYunQpLly4YO70SRQUFGD+/PlwdHSEo6Mj5s+fb3JvnP4ybFVVFR5//HEoFIoRlxt197y1tRWtra2SOTNczjU3jrG2Z2w0/AkSYwby8/OxZs0aKBQKxMTEwN3dHbW1tdi4cSNKS0tRWlqKCRMmiG1Xr16NKVOmYPHixVCpVLh06RKOHTuGr776CvHx8Zg7dy7Wrl2LnJwc+Pr6Ii4uTryWOfswrl27hqCgIEyePBnJycn4/fffcfDgQZw5cwYHDhxAcHAw/P39kZKSgrq6OhQWFqKrqwslJSWSftasWQNfX19ERETAzc0NbW1t+PrrrxEREYH9+/cjNjYWwJ09NS0tLdi1axdCQ0OxaNEisQ+FQjFirNnZ2UhPT4dSqURSUhIcHBzwzTffID09HRUVFdi/f7/RD9CWlhYsWLAAs2bNQkpKCs6ePYsDBw4gLCwM9fX1mDRp0qhzpPPyyy/j6NGjiImJgbW1Nfbu3YukpCS4uLggNzcXp0+fRnR0NPr6+vDpp58iNjYW9fX10Gg0Yh9dXV0ICQnBqVOnEBgYiJUrV6K7u1uM6YsvvhDvYWZmJnbu3InW1lZkZmaKfcydO3fMY6ysrERkZCT6+/uRkJAAT09PVFdXIycnBwcPHkRNTY3k07vi4mJERUXBysoKS5cuhYeHB4qLixEYGAgXFxez5w4A0tLSkJubiwceeADPPfccgDv/AXj22Wdx/Phx5OTkGL2mqqoK7777LsLCwvDCCy/g/Pnzw/avUCiQmZmJ7du3AwDWrVsn1ul/n1kax1jiZmxUxBgTnTp1imQyGfn6+tLVq1cldZs2bSIAlJWVJZb5+fnRhAkTqKOjw6gv/defO3eOAFBycrLJ6xYUFBAAKigokJQDIAD00ksvScpXrVpFAEihUND27dvF8qGhIdJqtQSA6urqJK9pbm42um57ezt5eHiQt7e3pLy0tJQAUGZmpsl4MzMzCQCVlpaKZU1NTSSTycjd3Z3Onz8vlvf19VFQUBABoN27d4vlujkBQJs3b5b0v379egJAmzZtMnl9Q8nJyQSAZsyYQZcvXxbLjx49Ks5TUFAQ/fnnn2Ldvn37CAClpqZK+kpKSiIA9PHHH0vKOzo6aNq0aeTm5kZ//fWXWB4aGkrDvZVaOsbBwUHSaDQEgL777jtJ+1dffZUAUEpKiqT99OnTSRAEqqioEMuHhobEcZj7Nl9WVkYA6OGHH6br16+L5V1dXTRjxgwCQOXl5WK57nsEAO3YscOsa+io1WpSq9XjEoel7Uf7t8iYDidIjOlJS0szekPVGRwcJDc3N/L39xfL/Pz8yMHBgbq6ukbs924SJEdHR+rt7ZWUl5eXEwDSaDQ0NDQkqdu9e7dFP7RSU1MJALW0tIhlY0mQ3n77bQJAW7ZsMWp/5MgRAkDh4eFimW5OvLy8aHBwUNJeV/fUU0+ZNQZdgrRr1y6juunTpxMAKisrk5Tfvn2bbGxsKCQkRCy7cuUKWVtbS+LU98EHHxAAKioqEsvMSZDMHaPuvkZFRRn11dPTQ0qlkuRyOd26dYuI/k4OYmJijNq3tLSQtbW12QlSSkoKAaB9+/YZ1X3yySdGyZnue8TPz8+s/vWNlCBZGoel7TlBYubiJTbG9NTU1AAAvv/+exQXFxvV29jY4MyZM+Lfly1bhoyMDMyePRtJSUkICwtDUFAQnJ2dxy0mb29v2NvbS8p0p918fHyMlqx0de3t7ZLy5uZmbNq0CSUlJWhra8OtW7ck9e3t7VCr1WOO8/jx4wCkSyU6CxcuhFwux6+//mpUN3fuXFhZSbdDTp06FQBw/fp1i2IwXNoC7sxHc3OzUZ21tTXc3d0l8/Tzzz9jcHAQt27dMvmIg8bGRgDAmTNn8MQTT1gUlzljHGkOHR0d8dhjj+GHH35AQ0MD5syZI+5hCg4ONmqvVqsxbdo0cd/YaEa6tm5Pmqn7FxAQYFb/5rI0jrHGzdhoOEFiTE9XVxcAYOPGjWa1f+WVV6BSqZCfn4+tW7ciKysLMpkM0dHR2LZtm7g5+26YSrZkMtmodQMDA2JZU1MT5s2bh+7uboSFhSEmJgbOzs6wsrLC4cOHUVZWZpQwWaq7uxsATO4ZEgQBkyZNQltbm1HdSGMYHBy0KIaxzJX+POnu/5EjR3DkyJFhr9Pb2ztucemPcaQ5BP5OfnXtbty4AQBwd3c32X7SpElmJ0jd3d2wsrKCm5ubyX4EQRCva1g3niyNY6xxMzYaTpAY06P7Qdbd3Q0nJ6dR2wuCgJSUFKSkpKCzsxMVFRX47LPP8Pnnn6OxsREnTpyAtbX1/zvsUW3btg3Xrl3Dnj178Mwzz0jqVq5cKZ7Euhu6uevo6DD6JIqI0NHRMa6frP0/6OJLT09HVlbWPbu+qVOIAHDp0iVJO92jIS5fvmyy/XD9DHftoaEhXLlyxSjhunz5MojI5P0b74dkWhrHWONmbDR8zJ8xPfPnzwfw91KbJVQqFeLi4rBv3z6Eh4fj9OnTaGpqAgAxSbL0E5H
xcvbsWQAQT6rpEJHJT0rGEu+jjz4KACaP/h89ehR9fX0ml8DuJwEBARAEAdXV1Wa/Zjzv7Uhz2Nvbi9raWtjZ2WHmzJkAAF9fXwBARUWFUfvW1laLjvqPdG1d2XjdP2tr62Hny9I4/sm42X8LJ0iM6Vm9ejVkMhlSU1NNHle+fv26uOcBuPMGTESSNgMDA+JSjVwuBwC4uLhAEIQxP5vmbuk+0TF8js/mzZtx8uRJo/ZKpRIALIo3KSkJMpkM2dnZkn09/f39eO211wDgvv9VK5MnT0ZiYiKqqqrw/vvvG91b4E6yp/88rLHM1XACAwOh0Whw6NAh/PTTT5K6d955B52dnXj66afFx0wEBQXBy8sLBw8elNxbIsIbb7xhUdKWnJwM4M6zr/SXpG7cuIENGzZI2twtpVKJq1evoq+v767j+CfjZv8tvMTGmJ7Zs2cjLy8Pq1atwsyZM6HVaqHRaNDT04Pm5maUlZVhxYoV+PDDDwEAcXFxcHZ2xoIFC6BWqzEwMIAff/wRp0+fRkJCgpiYODo6IiAgAOXl5Vi+fDm8vb1hZWWF5cuX39XGaHOtXLkSBQUFiI+PR2JiIlQqFWpqavDLL78gOjoa3377raT9Qw89BA8PD+zduxe2traYOnUqBEFAamrqsE/81mg02LJlC9LT0+Hj44PExEQ4ODigqKgIDQ0NiI2NNVreux/l5eWhoaEBGRkZ2LNnDxYuXAiFQoELFy6gtrYWjY2NuHjxorhxPjw8HF9++SXi4+MRFRUFuVwOX19fxMTEWHxtKysr7Ny5E5GRkdBqtViyZAnUajWqq6tx+PBhaDQabN68WdL+o48+glarRUREhPgcpJKSEly8eBE+Pj44ceKEWdcOCQlBamoqcnNzMXv2bMTHx4OIUFhYiD/++ANpaWkICQmxeEymhIeHo7a2FlFRUQgODsaECRMQEhIiflkSxz8ZN/uPuUen5xi7rx07doyWLVtGHh4eZGNjQ66uruTn50evv/461dfXi+3y8vLoySefJLVaTXK5nFQqFc2bN4/y8/Opv79f0mdDQwNptVpSKBQkCILkmPxIx/xDQ0ON4hvpqPJwR/RLS0spMDCQnJycSKFQkFarpbq6OpNH9omIampqKDQ0lJycnMTn3Zw7d46ITB/z1zlw4ID4OltbW5ozZw5t3bqVBgYGzB7DSGM3RXfMXxefvpGO4Q933PzmzZv03nvvkb+/Pzk4OJCdnR15eXlRXFwc7d69WzKWgYEBysjIoAcffJBkMplkTGMd44kTJyghIYFcXV3JxsaG1Go1rV27lq5cuWKyn/LycgoJCSE7OztSKpW0ZMkSam1tHXHsw9mxYwcFBASQvb092dvbU0BAgMlHRoz2KIiR9PT00PPPP09TpkwRH0Vg2I+5cVjano/5M3MJRCY+Q2aMMcYY+w/jPUiMMcYYYwY4QWKMMcYYM8AJEmOMMcaYAU6QGGOMMcYMcILEGGOMMWaAEyTGGGOMMQOcIDHGGGOMGeAEiTHGGGPMACdIjDHGGGMGOEFijDHGGDPACRJjjDHGmAFOkBhjjDHGDPwPKliM2TULJHMAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "exp='mnist'\n", + "file_name = 'res-sophIA-mnist.json'\n", + "name = 'mnist'\n", + "plot_all_error_bar(folder, file_name, name, exp, meas_calc_list, to_save=do_save)\n", + "# plot_evolution(folder, file_name, name, exp, meas_calc_list, to_save=do_save)\n", + "\n", + "new_file_name = 'res-sophIA-mnist_dyn-EPM.json'\n", + "total_to_dynamic_EPM(exp,folder, file_name, new_file_name, meas_calc_list, idle_power)\n", + "plot_all_error_bar(folder, new_file_name, name+'_dyn-EPM', exp, meas_calc_list_2, to_save=do_save)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "aed4483c", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAk0AAAHPCAYAAABOau4WAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAACrJklEQVR4nOzdd1gU1/oH8O9soUlRmoJSFGvUiL1LsZdYYzTG2L2JGks0saQZTSyJmqbXFGOwG1tiLNgBJYId1MSuNBUVFASkbTm/P/ztXpZdYHdYYGfn/TwPz705Mzu8X2ZlDzPnnOEYYwyEEEIIIaRUkqougBBCCCFECKjTRAghhBBiBOo0EUIIIYQYgTpNhBBCCCFGoE4TIYQQQogRqNNECCGEEGIE6jQRQgghhBiBOk2EEEIIIUagThMhhBBCiBGo00TMzt/fHxzHlfr13XffAQCCg4PBcRyioqKqtGYhGjduHDiOw4YNG6q6lFIZOv8SiQQuLi5o27Ytli1bhtzc3Kou02SJiYngOA7+/v5VXYpoXbp0CRMmTED9+vVhb28PBwcH+Pn5oXPnzvjggw9w7NixEl/LGMOOHTswdOhQ+Pj4wM7ODjVq1EBgYCDmzp2L5OTkEl+r+bc3bty4UuvbsGGDUe+Rx48fw8bGBhzHoXXr1qXuC+j/jpVIJHByckKdOnUQEhKCDz74AOfOnSvzOMR0sqougFivzp07o379+ga3vfLKK5VcTfmMGzcOGzduRFhYWJm/KIlhvXv3Rq1atQAASqUSKSkpiImJwYULF7BlyxZER0fD1dW10urx9/dHUlISEhISqOMjQKtXr8asWbOgVqtRu3ZthISEoEaNGkhLS8OlS5cQExODqKgo9OzZU++1Dx8+xJAhQ3Du3DltR6Vz587Izc1FbGwsVqxYgR9++AGrVq3CtGnTKjzLpk2boFAoALzsCF6+fBktWrQo83VFf8fm5eUhPT0dcXFxiIqKwqpVqxAUFITffvsN9erVq9D6RYURYmZ+fn4MAAsLCytz36SkJHb9+nX24sWLii+sHMaOHWt0psry8OFDdv36dZaZmVnVpZQKAAPAIiMj9bbdvHmTubu7MwBszpw5lVqX5n2akJDA6/UJCQkMAPPz8zNrXaRsly9fZhKJhAFg3377LVMqlTrbVSoVO3nyJFuyZInea589e8bq1avHALCWLVuyf/75R2e7QqFgK1euZFKplAFg33//vd4xNL8Pxo4dW2qdYWFhRr1HGjduzACw2rVrMwBs+vTppe5f2u9YtVrNDh48yBo0aMAAsJo1a7J79+6VejxiPLo9R6qUr68vGjduDAcHh6ouRXC8vLzQuHFjuLi4VHUpvDVs2BDvvPMOAODEiRNVXA0Ril27dkGtVqNjx46YNWsWpFKpznaJRIJu3brho48+0nvte++9h3v37qFu3bqIiIhA06ZNdbbLZDLMmTMH33//PQDggw8+wI0bNyosy+nTp3Hjxg3UqFEDv/32GwBg69atKCgo4HU8juPQr18/nDt3Dg0aNMDjx48xadIkc5YsatRpIlWqpDFNRcfrJCQk4O2330atWrVga2uLgIAAfPLJJ6X+Url48SLeeust+Pr6wtbWFq6urujduzfCw8NNqk8zbmXjxo0AgPHjx+uMJfj888919ivtNo9mHEJiYmKJ7ZGRkejVqxdq1KgBe3t7tGrVCps2bTJ4vJLGNH3++efa2tLS0jBt2jT4+PjAxsYGPj4+mD59OjIzMw0ekzGG3377DW3atIGDgwPc3NzQt29f7a0OjuMQHBxsxE/OeEVv2RmiVCrx66+/Ijg4GK6urrC1tUXdunUxZcoUpKSk6O1ftM7c3Fx89tlnaNKkCRwcHODv768dZ5KUlAQAqFu3rs455TO+TqlU4uuvv0bTpk1hb28Pd3d3vPHGGyV+2J47dw5z585Fu3btUKtWLdjY2KBmzZp47bXXcPz48RK/z65du9CjRw+4ublBLpfDzc0Nr7zyCiZPnowrV64YfM3u3bvRp08feHh4wMbGBrVr18bo0aNx7do1k3MCwP379zF9+nQ0aNAAdnZ2cHFxQefOnfHzzz9DpVLp7a/5eY8bNw4vXrzAggULUL9+fdja2qJWrVoYO3YsHjx4YFINjx8/BgB4enqa9Lp79+7h999/BwCsXLkS1atXL3HfqVOnokWLFlAoFFixYoVJ38cUv/76KwDgrbfeQs+ePVG/fn08e/YMf/75Z7mOW716de3Y0YiICFy8eLG8pRJQp4lYuPj4eAQGBiI6OhpBQUHo1q0bUlNTsWTJEowcOdLga77//nu0a9cO27Ztg5ubGwYOHIimTZsiKioK/fv3x+LFi43+/o6Ojhg7diwCAgIAvBxDMHbsWO1XYGCgOWICAH777Td0794dz549Q58+fRAYGIi4uDiMHTtW+8vPFCkpKWjVqhX27NmDdu3aoWfPnsjOzsaaNWvQq1cv7RiKoqZNm4aJEyciLi4O7dq1Q69evZCSkoJu3brhwIEDZkipTzNgtfhf/ACQnZ2Nnj17YvLkybh48SJeffVVDBw4ELa2tvjpp5/QsmVLxMXFGTxufn4+goOD8c0336Bu3boYOHAgGjRogPr162Ps2LGoVq0aAGDYsGE651TTiTPFiBEj8Mknn8Db2xuDBw+Gi4sLdu3ahbZt2yI2NlZv/48++girVq1Cfn4+WrdujcGDB6NOnTo4cOAAevbsqb3KUdTixYvxxhtv4OTJk2jWrBmGDx+ODh06QCqVYv369YiIiNDZX6lUYsSIERg+fDiioqLQsGFDDB48G
B4eHti6dSvatGmDw4cPm5Tz/PnzaNGiBdasWYPCwkIMHjwYnTp1wqVLl/Duu++if//+KCwsNPja58+fo1OnTvjpp5/wyiuvoG/fvmCMYdOmTejcuTOeP39udB2+vr4AXl6d/Oeff4x+3f79+6FWq1G9enUMHDiw1H05jsPbb78NANi3bx8YY0Z/H2NlZ2dj165dAIAJEyaA4ziMHz8eALRXncqjb9++2nGCpQ2KJyao4tuDxAqZMqYpKCjI4HgXzZgBAOzjjz/WGbNw9epVVq1aNQaAxcTE6Lzu8OHDjOM45u7uzk6ePKmz7cqVK6xOnToMAIuKijIpU1ljmowZ31LSGBpNu1wuZ/v379fZphkT4eLiwnJzc42qaeHChdqf3bhx41h+fr52W3JysnbcxLZt23Re99dffzEAzNHRkZ0+fVpn26pVq7THDAoKKjGjIZrXFT3HCoWCJSQksEWLFjGO45iDgwO7cOGC3mtHjRrFALABAwawx48f62z79ttvGQDWoEEDnfdHZGSk9nu++uqrLDU11WBd5hrTBIC5u7uzy5cva7cplUo2ffp07Xui6DlgjLHw8HD28OFDvWPGxMQwZ2dnJpfL2f3797Xt+fn5zN7enjk6OrIbN27ovS4xMZFdv35dp+2jjz5iAFj79u31xrTs2rWLSaVSVqNGDZaRkWFU3vz8fO3P7N1332WFhYXabXfv3mX+/v4MAPvoo490Xqd5DwNgvXv3Zs+fP9due/bsGQsMDGQA2NKlS42qg7GX72MnJycGgMlkMtavXz/21VdfsWPHjpU6xu/tt99mAFhISIhR3+fkyZPa2ou+T8w1pumXX35hAFhgYKC27f79+0wqlTKJRMISExMNvs6U37E9evRgANjo0aPL3JeUjTpNxOw0/6BL+ir6oVtWp6l169ZMrVbrfY93332XAWCLFy/WaW/fvj0DwHbv3m2wtp07dzIAbNiwYSZlqoxO0+zZsw2+TjNI9NSpU0bVpOk01alTx+AA++XLlzMAbMKECTrtoaGhDABbsGCBwTratm1brk5TSV+9e/dmV65c0XvdtWvXGMdxzNvbm2VlZRk8dr9+/RgAnc5m0U5T8Z9ZUebsNH333Xd62/Pz87Ud1K1btxp93AULFjAA7L///a+27cmTJ9pOoDGePn3K7O3tmZ2dnU7nq6ipU6cyAGz16tVGHXPz5s0MAPP29tbrBDLG2O7duxkA5uTkxPLy8rTtmo5DtWrVDHYUf//9dwaAhYaGGlWHRmxsrPbfRtEviUTCOnXqxH7//Xe91/Tp04cBYCNHjjTqe9y4cUN73HPnzmnbzdVp0vy+Kn4ONO/rhQsXGnydKZ2mkSNHMgCsb9++Ze5LykZLDpAKU9KSA40bNzb6GAMGDADHcXrtTZo0AQCdsRDp6ek4d+4c7O3t8dprrxk8nmY8TkxMjNE1VJaSam7SpAlu3Lhh8riP7t27Gxxgb+hnp1QqtT+Tt956y+DxRo0ahfPnz5tUQ1FFlxxgjOHx48eIi4vDkSNHtLdpatasqd0/PDwcjDH07dsXTk5OBo8ZHByM8PBwxMTEYMCAATrbPD090bVrV971mmLs2LF6bba2thgxYgS++eYbREVFYdSoUTrbnz59ioMHD+Kff/5BRkaG9nbp7du3AQA3b97U7uvh4QF/f39cuXIFc+bMwcSJE0tdtiMyMhJ5eXno3r07ateubXCf4OBgrF27FjExMXjvvffKzKgZ6zVy5EjY2trqbR86dChq1KiBjIwMXLx4EZ07d9bZ3qZNG3h5eem9ztD70RgdOnTAv//+i5MnT+Lw4cM4f/48Ll26hOfPnyMmJgYxMTE4dOhQudYxY0VuyRkar1Ue//zzD86ePQtbW1u9f3MTJkxAeHg4NmzYgM8++wwSCf+RNGq1GgAM/h4lpqNOE6kwkyZNKveaRpqxC8U5OzsDeDluRSMhIQGMMeTl5Rn8pV5UWlqa9v8vX77c4IDdlStXwt3dnU/ZvJiS1dzHS09P1/53SYPZy7uW0fz58/UGkefl5eHdd9/Fpk2b0Lt3b1y8eFE7E+revXsAgPXr12P9+vWlHrvo+TRHvenp6fjggw/02hs3boz58+frtFWvXr3EAcV169YF8HLwdFHr1q3D+++/jxcvXpRYQ1ZWls5/b9q0Ca+//jq++eYbfPPNN3B1dUX79u3Rs2dPvP322zrvVc3P7sSJE2V+WBr62Rmi6dRoMhXHcRzq1q2LjIwMgx0gc7+/gZez5EJCQhASEgLgZccmNjYWixcvxrFjx7Bx40b0798fw4cPBwDtz0gzkLwsT5480f5/Dw8P7f/X/EyLdqoM0Ww3dA407+nBgwejRo0aOtsGDhwId3d3JCUl4cSJEwbXmjJWeno6AFTqGmjWjDpNxKKZ8heW5i8qR0dHDBs2zOjXHT58GCdPntRr//zzz83aadLUV5Ly/DVZGceriL9U7e3tsXr1amzevBmXL1/G4cOH0b9/fwD/+3kFBgaWudBf+/btDR6br5ycHO2MyaKCgoL0Ok3GKPrhevHiRbzzzjuQSqX46quv8Nprr8HX1xcODg7gOA6//PIL3nnnHb0P5K5duyIxMREHDx7EyZMnERMTgyNHjuDQoUNYuHAh/vzzT3Tv3h3A/3529evX17viU5wpV37Lw9zvR0OkUim6dOmCQ4cOoV27drh06RL27t2r7TS1bt0aW7ZswaVLl6BUKiGTlf4RqJmk4OLiotNZ1EwiKK3TC7x8HwEvfycVVVhYiC1btgB4Obi+S5cueq/VXNlav349704TY0w7UaJ58+a8jkF0UaeJWA0fHx8ALz/cf/vtN6N/SZvjES42NjYAXs6GMUShUCA1NbXc36eiuLm5wdbWFgUFBUhKSjJ466f4Ugnm4uzsDDc3N6Snp+P69evaTpPmfHbu3Blr1qypkO9dEn9/f6NnS2VmZiIzM9Pg1SbNz6xOnTratl27doExhunTp2Pu3Ll6r9HcnjPE3t4er7/+Ol5//XUAL68SffLJJ/jll18wYcIE7TIKmp9do0aNzPaYHc1tPs1VLEMSEhJ09q0qUqkUoaGhuHTpkvZKC/DyFvicOXPw/Plz/PXXX6X+ccUYw+bNmwEAgwYN0vl9orlqdufOnVLr0JzL4lfZ/vrrL21d9+7dK/VnunfvXjx79ozXlaLw8HBkZGQAAHr16mXy64k+WnKAWA1vb2+8+uqryM7ONnkqdVk0naKS1hLSrIHz7NkznUv6GkeOHCnxtZZALpejY8eOAIBt27YZ3Gf79u0V8r2fP3+Op0+fAtD9i7xv374AXk735nPrpixlnVNTaD5ciyosLMSOHTsAQOe25LNnzwAAfn5+eq/Jz8/Hnj17jP6+Hh4e+PrrrwEAycnJ2g/I7t27w8bGBlFRUQbfj3xoMuzYscPg+fjzzz+RkZEBJycno56fVh7GdGg1z44r2mENCAjAG2+8AQD48MMPS1yvDADWrl2LK1euwMbGRq9zGxoaCgC4cuVKiR0nhUKBffv26eyvoVmbad68eWAvJ2QZ/GrXrh0KCgq0V6VM
8fz5c7z//vsAgJ49e5p1eRQxo04TsSpffvklgJeLUO7fv19vO2MMZ8+exdGjR006ruYX77///mtwu1wuR7du3QAAn3zyic6tuMuXLxs10LaqzZgxAwDwww8/4MyZMzrbvv/+e5w9e9bs3zMvLw8zZswAYww2NjbajhIAtGzZEsOGDUNKSgqGDh1q8ErXixcvsHXrVqPHqBRV1jk1xRdffKGzXpBarca8efNw//59+Pj46FzR0Ax83rhxo86Vyfz8fEydOlV7taaopKQk/Prrr3rjnABo3+c1atTQjg+qWbMmpk+fjhcvXuC1117D1atX9V5XUFCAffv2Gb3a9fDhw+Hr64uHDx9i9uzZOp3NhIQEzJkzBwAwffp02NnZGXVMvj7++GNMnz7d4IKeSqUSP//8M3bv3g0Aeuu5/fe//4W/vz8SEhIQGhqqd/6VSiW++eYbzJw5EwDwyy+/6K0h1qZNG3Tv3h2MMYwePVrvKnJeXh6mTJmCpKQkeHh4YOLEidptycnJ2gVMDU0gKGrMmDEATFuziTGmvT15+/ZteHl5Yd26dUa/npSObs8Rq/Laa6/h+++/x5w5czBw4EDUr18fjRo1gouLC9LS0nD58mU8efIE8+bNM+ly9eDBg7Fo0SL88MMP+Oeff+Dj4wOJRIKBAwdqF8n78ssvcerUKaxbtw4nT57Eq6++igcPHuDChQsYNWoUoqKitLdPLNGQIUPwn//8B7/88gu6dOmCrl27wsvLC1evXsX169fx/vvv49tvv9VeoTHV8uXLtbeKGGN48uQJLl26hCdPnkAikWD16tV6V1/CwsKQmZmJQ4cOoVGjRmjRogXq1q0LxhgSExNx+fJlFBYW4vr16zoz74wxbNgwREZGYvTo0dpV2IGXVyAaNWpk9HF8fX3RunVrtGrVCsHBwXBzc8P58+dx9+5dVKtWDdu2bdPpRIwfPx7ff/894uLiULduXXTt2hVSqRTR0dHIy8vDzJkz9Ra3zMjIwOTJkzF16lQEBgZqx9fcvn0bcXFx4DgOK1as0HmcyPLly5Gamopt27Zpx4XVq1cPMpkM9+/fR3x8PF68eIFDhw4ZNa7J1tZWu7r4jz/+iPDwcHTo0AHZ2dmIiIhAfn4+evfujYULFxr9s+MrNzcXa9aswZo1a1C7dm20aNEC1atXx9OnT3H58mU8evQIALBgwQK98UCurq6Ijo7G4MGDcfHiRTRv3hxt2rRBQECA9oG9aWlpcHZ2xooVK0rs2GzZsgW9evXC2bNnUa9ePXTq1AleXl54/vw5YmNj8fTpU7i6umLPnj06t27DwsKgVqvRtm1bbQe6JCNHjsTs2bNx+fJlXLx4Ue8K3q+//qodXlBQUID09HRcunRJezUzODgYv/32m8GrmoSnSlragIiIORe3LOkYmvVPSlon5erVq+w///kPa9CgAbOzs2MODg6sXr16rHfv3uyHH35gDx48MC0UY+zPP/9knTt3Zk5OTozjOIPrqMTGxrJevXoxZ2dnZm9vz1q0aMHWrl3L1Gp1mes0lbReUEk/i7LWaSppjRfNOkaG1ltSq9Vs3bp1rFWrVszOzo5Vr16d9erVi506dYpt2rSJAWBvvvlmyT8kA1DC+kx2dnasfv36bPz48ezSpUslvl6lUrFt27axfv36sZo1azK5XM7c3NxYs2bN2Pjx49mff/6ps9BiafmKH3fZsmWsadOmzM7OzuAinKUpujaXQqFgS5YsYY0bN2a2trbM1dWVDRs2jP37778GX5uWlsamTp3KAgICmK2tLfP29majR49mt2/fNvjezsrKYt999x0bMmQIa9CgAXN0dGTVqlVjDRs2ZGPGjDG4MKhGeHg4Gzp0KKtduzaTy+WsevXqrEmTJmzkyJFs27ZtJj8sOzk5mU2bNo3Vq1eP2djYMCcnJ9axY0f2448/MoVCobd/Wf9W+Tz4OD09nf3+++9s8uTJrFWrVszLy4vJZDJWrVo11rhxYzZhwgS9hW+L07yvBg0axLy9vZlcLte+BxwcHNidO3fKrCM3N5etXr2aBQcHMzc3NyaTyZizszNr2bIlW7Bggd7CqkV/D6xZs8aorIMHD2YA2JQpU7RthtbCq1atGvP29mZBQUFszpw5OutKEfPhGKuAteEJIVZnwoQJCAsLw6pVqzB79uyqLocQs3v+/DlCQkIQFxeHXr16Yd++fWUuX0LEhcY0EUK0/v33X71p1Gq1GuvWrcOGDRtgZ2eHN998s4qqI6Riubi44MiRI2jSpAmOHj2KESNGWPQEDlL5aEwTIURrxYoV2LlzJ1q2bInatWvjxYsXuHbtGhITEyGVSrF27VqDqzoTYi08PDxw/PhxrFu3DowxXLx40eA6YESc6PYcIUTr0KFDWLduHS5evIj09HQolUp4enqic+fOmDVrFjp06FDVJRJCSJWhThMhhBBCiBFoTBMhhBBCiBFoTJMJ1Go1Hj58CCcnJ3piNCGEEGIlGGPIzs6Gt7d3qY/gok6TCR4+fKh9phMhhBBCrEtKSorOo3eKo06TCZycnAC8/KFqHldgCTQ9ZDFdAaPMlNlaUWZxZAbEmdtSM2dlZcHHx0f7OV8S6jSZQHOCnZ2dLarTpFAoEBUVhX79+kEul1d1OZWCMlNma0WZxZEZEGduS89cVkeOBoITQgghhBiBOk2EEEIIIUawyE5TTk4OFi5ciD59+sDV1RUcx2mfjl7UuHHjwHGc3pehJ3ar1Wp8/fXXqFu3Luzs7PDqq69i+/btlZCmcshk4rvTSpnFgTKLgxgzA+LMLeTMFrm4ZWJiIurWrQtfX1/Uq1cPUVFRCAsLw7hx43T2GzduHH7//Xf8+uuvOu0uLi547bXXdNoWLFiA5cuXY/LkyWjbti3++usvHDx4ENu3b8fIkSONqisrKwsuLi54/vy5RY1pIoQQQgh/xn6+W2R3z8vLC6mpqahVqxYuXLiAtm3blrivTCbD6NGjSz3egwcPsGrVKkybNg1r1qwBAEyaNAlBQUH48MMPMXz4cEilUrNmqExqtRrp6elwd3cvdX0Ja0KZKbO1osziyAyIM7fQM1tkxba2tqhVq5bR+6tUKmRlZZW4/a+//oJCocDUqVO1bRzHYcqUKbh//z5iY2PLVW9VU6lUiI2NhUqlqupSKg1lFgfKLA5izAyIM7fQM1vklSZT5ObmwtnZGbm5uahRowbefPNNfPXVV3B0dNTuExcXh2rVqqFJkyY6r23Xrp12e5cuXfSOXVBQgIKCAu1/azpmCoUCCoUCACCRSCCVSqFSqaBWq7X7atqVSiWK3gGVSqWQSCQltmuOq6G596tUKkts17xGoVBALpdDrVbrvCE5joNMJiuxvaTaqzJTUYYyafZRq9U6xxdyJmPOEwCLeu+ZI1Np50lTk0qlglwut4pMxduL167Zp3gtQs5k7HnS/K81ZSqtds0+jDG9/YWaqbR2Taai2S0lU/FsJRF0p8nLywtz585Fq1atoFarcfjwYaxduxaXL19GVFSU9sSlpqaiZs2aeusveHl5AXi50rchy5Ytw6JFi/T
ajx49CgcHBwCAr68vWrZsiStXriA5OVm7T6NGjdC4cWOcO3cOaWlp2vbAwED4+fnh1KlTyM7O1rZ37NgRnp6eOHr0qM4bLSQkBPb29ggPD9epoV+/fsjLy0NkZKS2LTIyEv3790d6errO1TMnJyeEhoYiJSUF8fHx2nYPDw906tQJt2/fxs2bN7XtlpRJJpMZzKTpFD948ABXr161ikxlnae7d+8CAI4dO2Y1mYw9T3fv3kXTpk2tKlNJ56l58+YAgJiYGOTk5FhFprLOk6ZGzXvbGjIZe56Al5OfoqOjrSZTaefp6dOnAP53ri0lU25uLoxRroHgSUlJuH//PtLT0+Hg4AAPDw80btwYdnZ2fA+pRzOmydBAcEOWLl2Kjz/+WGeAd/fu3ZGamopr167p7KtWqyGVSjFz5kx89913escydKXJx8cH6enp2oFiltCTVyqViImJQadOnWBvb2+1f50UrV2lUiEmJgZdunTR6QwLOVNZ56mgoAB///03OnXqBJlMZhWZyjpPmvd2ly5dYGtraxWZircXr50xpj3PRcdaCjlTWecpLy9P+ztMJpNZRSZjzpPm/d2tWzcUJ9RMpbXL5XIUFhYiOjpae64tJVNWVhbc3d3LHAhucqcpMjISGzZswIkTJ5Camqq3XS6Xo02bNhgyZAjGjRsHNzc3Uw6vx9ROU15eHhwdHTF+/HjtrLoBAwbg+vXr2r/UNXJzc1GtWjXMnz8fy5YtK/PYNHuOEFKRmEoF9dV4sGfp4FzdIWkeCE7Ak1QIEQqzz57buXMnFi5ciFu3boExBh8fHwwePBg1a9aEq6sr8vLy8OzZM9y8eRMXL15ETEwMPvnkE4wePRqLFy/W3gqraPb29nBzc8OzZ8+0bV5eXoiMjARjTOeqhKbT5+3tXSm1VRS1Wo2UlBT4+PgIcjYCH5SZMlsbZXQkCv/7DVj6E20b5+4Jm2mzIesaUoWVVTwxneeixJhb6JmN6jR16NAB586dQ6tWrbBq1SoMHz4ctWvXLnF/hUKBU6dOYcuWLdi5cyd+//13bNq0CUOGDDFb4SXJzs5Geno6PDw8tG2BgYH49ddfcf36dbzyyiva9rNnz2q3C5lKpUJ8fDy8vb0F+SbkgzJTZkvFGDN6fIRW7Clg+edgAIqOvGTpT1CwaD4K5n8OdNS/hVMaBwcHi3ogammEeJ7NQYy5hZ7ZqE6TjY0Njh8/jtDQUKMOKpfL0b17d3Tv3h3ffvstVq1apTNgyxzy8/OhUCj0nkj8xRdfgDGGPn36aNsGDRqE999/H2vXrtWu08QYw08//YTatWujU6dOZq2NECJeL168KPNJ6UVJAFzr3ALetjaQGOjkqBnDg88XoOnpy1Drv7xE2dnZOrOICSHlZ1Sn6dSpU7y/QfXq1fHFF1+Y/Lo1a9YgMzNTO7Nt//79uH//PgBg+vTpyMjIQMuWLfHmm29qH5ty5MgRhIeHo0+fPhg0aJD2WHXq1MGsWbOwYsUKKBQKtG3bFnv37kV0dDS2bt0q6IUtCSGWxdSrTJ2rO6GOnW2J2yUcBx87W3Su7oTozOwS9zNUB3WaCDEvi11yYOXKlUhKStL+9x9//IE//vgDADB69GhUr14dAwYMwLFjx7Bx40aoVCrUr18fS5cuxQcffKB32W/58uWoUaMGfv75Z2zYsAENGjTAli1bMGrUqErNVRE4joOHh4dgLsWbA2UWByFm1ixHYqxatnKz7se3jqokxPNsDmLMLfTMFvnsOUtFs+cIIWUxeUzT1Xjgk9ll7/flN0DzQKMPK6QxTYRUtQp/9lxaWhrCwsJw/vx5ZGZmGlwSneM4nDhxgu+3IEZSqVS4ffs2GjRoIJpbjZSZMlsqjuNQrVo1o/dnbTsgz91TZ9ac3jE9asK+bQerXX5AiOfZHMSYW+iZeXWarly5gtDQUGRkZKC0C1X0V07lUKvVuHnzJgICAgT5JuSDMlNma8FJpbCZNhsFi+aXuI/N1PettsMEiOM8GyLG3ELPzGu+35w5c/Ds2TN8/PHHSEhIgEKhgFqt1vsS6gP5CCGkMsm6hsB24XJw7p467ZxHTdguXG716zQRIhS8rjTFxsZi8ODBWLx4sbnrIYQQUZJ1DYG0UzcUxl9AXMQJtAztDpvANlZ9hYkQoeHVabKxsUFAQIC5ayE8SSQS+Pr6CnKhML4osziILTMnlUIW2Aa2EhvIXn1VNB0msZ1nDTHmFnpmXrPnhgwZgszMTJ0nGIsBzZ4jhBBCrI+xn++8unorV67EP//8g5UrV/IukJiPSqVCXFycqMaQUWZxoMziIMbMgDhzCz2zUbfnJkyYoNfWrFkzzJs3Dz/99BMCAwMN9sw4jsP69evLXyUplVqtRnJyMpo1aybI2Qh8UGbKbK0oszgyA+LMLfTMRnWaNmzYUOK2e/fu4d69ewa3UaeJEEIIIdbCqE5TQkJCRddBCCGEEGLRjOo0+fn5VXQdpBwkEgkaNWok2NkIfFBmcaDM4iDGzIA4cws9s9Gz58LCwhAaGirqDhTNniOEEEKsj9lnz02cOBH16tVDvXr1MHHiRGzZsgUPHjwwS7GkfJRKJWJiYqBUKqu6lEpDmcWBMouDGDMD4swt9MxGd5pmzJiB5s2bIykpCWFhYRg7dix8fX3RsGFDvPPOO9ixYwceP35ckbWSEjDGkJaWVupzAK0NZRYHyiwOYswMiDO30DMbvSL4d999BwDIyMjAyZMnERkZiaioKPzzzz+4c+cOfv31VwBA48aNERISgpCQEAQHB8PNza1CCieEEEIIqUwmP0alRo0aGDx4MAYPHgwAePbsGaKiorSdqGvXruHGjRv48ccfIZFIoFAozF0zIYQQQkilK/fwdVdXVwwdOhSrV6/GxYsXsWPHDjRt2hSMMajVanPUSMoglUoRGBgoyIXC+KLM4kCZxUGMmQFx5hZ6Zl7PntNQKpU4e/YsIiMjERkZidjYWBQUFIAxhsaNGyMoKAg//vijOeutUjR7jhBCCLE+FfLsObVajbNnz2L58uXo3bs3qlevjm7dumHhwoV48uQJJk6ciB07duDRo0e4du2aVXWYLJlSqURERIRgZyPwQZnFgTKLgxgzA+LMLfTMRo9p6tevH06fPo2cnBxwHIfmzZtj0qRJCAoKQrdu3WjAdxVijCE7O1uwsxH4oMziQJnFQYyZAXHmFnpmoztNhw8fhkQiwdChQ/HZZ5+hefPmFVkXIYQQQohFMfr23MCBA+Hi4oI9e/YgMDAQ9evXx+TJk7F161bcv3+/ImskhBBCCKlyJg0EZ4whPj5eu7xAdHQ0nj9/Do7j4O/vj+DgYO2Xj49PRdZdJSx1ILharUZ6ejrc3d0F+zwfU1FmymytKLM4MgPizG2pmY39fC/X7Dm1Wo24uDhEREQgKioKf//9N7Kzs7WdqJCQEO2il9bAUjtNhBBCCOGvQmbP6b1YIkHr1q3x4Ycf4uDBg8jIyMCuXbvQtGlTJCQkICwsrDyHJ0ZSKBQ4ePCgqBYSpcziQJnFQYyZAXHmFnpmk1cELy41NVV7uy4yMh
L37t3TbhPq4lVCJNTpm+VBmcWBMouDGDMD4swt5Mwmd5oeP36s89iU27dvA3g53onjOLRo0UL77Llu3bqZvWBCCCGEkKpgdKdp6tSpiIqKws2bNwH8r5PUrFkzBAcHIyQkBEFBQahRo0aFFUsIIYQQUlWMHgiuGeXepEkT7ZWk4OBgUS1qaakDwTWLhTk5OYHjuKoup1JQZspsrSizODID4sxtqZmN/Xw3+krT9u3bERwcjJo1a5qlQGJe9vb2VV1CpaPM4kCZxUGMmQFx5hZyZqNnz40YMYI6TBZKqVQiPDxc0IPrTEWZxYEyi4MYMwPizC30zEZ1mpKTk8v9jR48eFDuYxBCCCGEVBWjOk0NGjTAtGnTkJCQYNLBFQoFtm/fjqZNm2L9+vW8CiSEEEIIsQRGdZq++uor7NixA/Xr10dQUBBWr16N8+fPG1yc6v79+9izZw/effddeHl5YfTo0fDz88OoUaPMXjwhhBBCSGUxevZcZmYmvvnmG6xfvx6pqangOA4SiQTVq1dH9erVkZ+fj2fPniE/P//lgTkOvXv3xpw5cxAaGlqhISqLJc+eUyqVkMlkFjUboSJRZspsrSizODID4sxtqZkr7NlzKpUKhw4dwokTJxATE4P79+/j6dOnsLe3h4eHB5o3b46goCAMGjQIfn5+5Q5iSSy502SJUzgrEmWmzNaKMosjMyDO3JaaucKePSeVSjFgwAB8++23OHv2LB48eID8/HxkZGTg1q1b2LNnD2bMmGF1HSZLplQqERkZKdjZCHxQZnGgzOIgxsyAOHMLPXO5HthLCCGEECIW1GkihBBCCDECdZqshExm8rOXBY8yiwNlFgcxZgbEmVvImU0eCC5mljoQnBBCCCH8VdhA8MqQk5ODhQsXok+fPnB1dQXHcdiwYUOpr1EoFHjllVfAcRxWrlypt12tVuPrr79G3bp1YWdnh1dffRXbt2+voASVS61W48mTJ1Cr1VVdSqWhzOJAmcVBjJkBceYWemaL7DSlp6dj8eLFuH79Olq0aGHUa1avXl3q414+/vhjzJs3Dz179sTq1avh6+uLUaNG4ffffzdX2VVGpVIhNjYWKpWqqkupNJRZHCizOIgxMyDO3ELPbJGdJi8vL6SmpiIpKQkrVqwoc/8nT55g8eLFmDdvnsHtDx48wKpVqzBt2jT88ssvmDx5Mvbv34+uXbviww8/FOzJI4QQQkjlschOk62tLWrVqmX0/vPnz0ejRo0wevRog9v/+usvKBQKTJ06VdvGcRymTJmC+/fvIzY2ttw1E0IIIcS6lWsI+7lz53D+/HlkZmYavFrDcRw+/fTT8nwLo2rYuHEj/v777xJXF42Li0O1atXQpEkTnfZ27dppt3fp0kXvdQUFBSgoKND+d1ZWFoCX46c0z92TSCSQSqVQqVQ692g17UqlEkXH2kulUkgkkhLbiz/PTzPLoPhCYEXblUolHB0doVQqIZfLoVardc4Hx3GQyWQltpdUe1VmKspQJpVKBScnJzDGdI4v5ExlnSe1Wq09z9aSqazzpHlva/axhkzF24vXzhiDk5OT3u9UIWcq6zwV/R1mLZmMOU+a3AD09hdqptLa5XI5GGM659pSMhl6lq4hvDpNz549w+DBg3H69GmUNvmuojtNjDFMnz4dI0aMQMeOHZGYmGhwv9TUVNSsWVOvU+Xl5QUAePjwocHXLVu2DIsWLdJrP3r0KBwcHAAAvr6+aNmyJa5cuaIzpqpRo0Zo3Lgxzp07h7S0NG17YGAg/Pz8cOrUKWRnZ2vbO3bsCE9PTxw9elTnjRYSEgJ7e3uEh4fr1NCvXz/k5eUhMjJS2xYREYH+/fsjPT1d5+qZk5MTQkNDkZKSgvj4eG27h4cHOnXqhNu3b+PmzZvadkvKJJPJSs2UlJRkdZlKOk8JCQnIycnB0aNHrSaTsecpISHB6jKVdp5CQ0MRERFhVZlKO08RERFQKpXa97Y1ZLLG82SOTJmZmTq/xywlU25uLozBa8mBcePGYdOmTQgODsbYsWNRp06dEtddCAoKMvXwOi5cuIC2bdsiLCwM48aN09kWFhaGadOm4ebNm/Dx8UFiYiLq1q2LFStW4IMPPtDu1717d6SmpuLatWs6r1er1ZBKpZg5cya+++47ve9t6EqTj48P0tPTtVMSLaEnr1ar8eDBA9SuXRu2trZW+ddJ8doZY0hNTUXt2rV1vqeQM5V1nhQKBVJSUlC7dm1IJBKryFTWedK8t318fCCXy60iU/H24rVzHIcHDx7Ay8tL5w89IWcq6zwVFBRof4dJJBKryGTMeVKr1UhNTYWPj4/ebDKhZiqtXfNvODk5WXuuLSVTVlYW3N3dy1xygNeVpgMHDqBdu3Y4ceJElT1wLysrCwsWLMCHH34IHx+fUve1t7fX6fxo5Ofna7cbYmtrC1tbW712uVwOuVyu0yaVSiGVSvX2LakzWVJ78eMa065QKHD16lXtz0HzgVpcSe0l1V6VmYorXrtCoUB8fDy8vb0N7i/ETGW1A9Ce56LHE3Kmss5T8fe2NWQqrnjtZb23hZiprHaJRGLwvS3kTMbUrlAocPnyZdSuXdtqMpXVzhgzeK6rOlNJGYrjNRA8Ly8P3bp1q9InFK9cuRKFhYUYMWIEEhMTkZiYiPv37wMAMjIykJiYiMLCQgAvb8M9evRI71ZiamoqAMDb27tyiyeEEEKI4PDqNAUGBpY4fqiyJCcnIyMjA02bNkXdunVRt25ddO3aFQCwdOlS1K1bV3s7LjAwELm5ubh+/brOMc6ePavdTgghhBBSGl6dpoULF2Lfvn04c+aMuesx2owZM/Dnn3/qfP38888AXo65+vPPP1G3bl0AwKBBgyCXy7F27Vrt6xlj+Omnn1C7dm106tSpSjKYC8dx8PDwqNIrf5WNMosDZRYHMWYGxJlb6JmNGtO0adMmvbb+/fsjKCgIb731Flq1alXiwKkxY8bwKmzNmjXIzMzUzmzbv3+/9vbb9OnT0apVK7Rq1UrnNZqrX02bNsXgwYO17XXq1MGsWbOwYsUKKBQKtG3bFnv37kV0dDS2bt1q8L6okMhkMsF3/ExFmcWBMouDGDMD4swt9MxGzZ7TjHAvqvjLDG3nOI73atv+/v5ISkoyuC0hIQH+/v567SXNngNezlL46quv8PPPPyM1NRUNGjTAggUL8NZbbxldk6U+sFelUuH27dto0KCB4DuAxqLMlNlaUWZxZAbEmdtSMxv7+W7UlaawsDCzFWYsPmOm/P39S1w3SiKRYMGCBViwYEE5K7M8arUaN2/eREBAgEW9CSsSZabM1ooyiyMzIM7cQs9sVKdp7NixFV0HIYQQQohFs8hnzxFCCCGEWBpenaYDBw5g6NChJT5+5OHDhxg6dCgOHTpUruKIcSQSCXx9fUtcENEaUWZxoMziIMbMgDhzCz0zr8eo9O3bFw8fPsTly5dL3Kdly5aoXbs2Dhw4UK4CLYmlDgQnhBBCCH/Gfr7z6updvnwZ7du3L3Wf9u3b6zx8j1QclUqFuLg43jMVhYgyiwNlFgcxZ
gbEmVvomXl1mp49ewZPT89S93F3d0d6ejqvoohp1Go1kpOT9R74aM0oszhQZnEQY2ZAnLmFnplXp8nDwwM3b94sdZ+bN2/C1dWVV1GEEEIIIZaGV6epW7du2L9/P65cuWJw++XLl7Fv3z4EBQWVqzhCCCGEEEvBq9M0b948AECXLl2wePFixMbGIjk5GbGxsVi0aBG6du2qXUySVDyJRIJGjRoJdjYCH5RZHCizOIgxMyDO3ELPzGv2HADs2bMHY8eORV5enk47YwyOjo7YtGmTzvPfrAHNniOEEEKsT4XOngOAYcOG4d69e1i2bBmGDh2K7t27Y9iwYfj6669x9+5dq+swWTKlUomYmBgolcqqLqXSUGZxoMziIMbMgDhzCz2zUY9RKYmnpyfmzp1rrloIT4wxpKWllfjcPWtEmcWBMouDGDMD4swt9MzCvKlICCGEEFLJytVp2rp1K3r27AkPDw/Y2trCw8MDPXv2xLZt28xVHyGEEEKIReB1e06lUuGNN97A3r17wRiDnZ0dvL298fjxY5w4cQIRERHYs2cPdu3aJdgR8kIilUoRGBgIqVRa1aVUGsosDpRZHMSYGRBnbqFn5tWj+eGHH/Dnn3+ic+fOOH36NHJzc5GQkIDc3FzExMSgS5cu2Lt3L1avXm3ueokBEokEfn5+ouqgUmZxoMziIMbMgDhzCz0zr6o3btyIhg0b4sSJE+jYsaPOtg4dOuD48eNo2LAhwsLCzFIkKZ1SqURERIRgZyPwQZnFgTKLgxgzA+LMLfTMvDpNt27dwsCBAyGXyw1ul8vleO2113Dr1q1yFUeMwxhDdna2YGcj8EGZxYEyi4MYMwPizC30zLw6TTY2Nnjx4kWp+7x48QI2Nja8iiKEEEIIsTS8Ok0tW7bEzp078fDhQ4PbU1NTsXPnTrRq1apcxRFCCCGEWApej1HZv38/Bg0ahFq1amHOnDkICgpCzZo18fjxY0RFReGbb77B48eP8ddff2HAgAEVUXeVsNTHqKjVaqSnp8Pd3V2wg+tMRZkps7WizOLIDIgzt6VmNvbznfez57755hvMnz8fKpVKp50xBplMhq+++grvv/8+n0NbLEvtNBFCCCGEvwp/9tzs2bNx48YNfP755xg8eDBCQ0MxePBgLF68GDdu3LC6DpMlUygUOHjwIBQKRVWXUmkoszhQZnEQY2ZAnLmFnrlcz56rV68ePv30U3PVQspBqNM3y4MyiwNlFgcxZgbEmVvImc1yQ1GpVCIjI0PQPwhCCCGEkNLw7jSpVCp8++23aNGiBezs7ODu7g47Ozu0aNEC3333HXWgCCGEEGJVeA0Ez8nJQe/evXHmzBlIJBL4+PhoZ8+lpKRArVajY8eOOHLkCKpVq1YRdVcJSx0IrlkszMnJCRzHVXU5lYIyU2ZrRZnFkRkQZ25LzVyhA8E/++wzxMbG4s0338Tdu3dx7949xMbG4t69e7h79y5GjhyJmJgYfPbZZ7wDENPY29tXdQmVjjKLA2UWBzFmBsSZW8iZeXWadu7ciTZt2mDLli3w9fXV2ebr64utW7eidevW2LFjh1mKJKVTKpUIDw8X1S1RyiwOlFkcxJgZEGduoWfm1Wl6+vQpevToUeo+PXr0wLNnz3gVRQghhBBiaXh1mho0aIAnT56Uuk9aWhrq16/PqyhCCCGEEEvDq9M0c+ZM7NixA//++6/B7VevXsXvv/+OWbNmlac2QgghhBCLwWv23KlTp7Bq1SocPXoUY8eORZcuXbSz56Kjo7Fp0yb07t0bs2fP1nttt27dzFJ4VbDk2XNKpRIymcyiZiNUJMpMma0VZRZHZkCcuS01c4U+e04ikYDjOGheWjS4obaiij+rTkgsudNkiVM4KxJlpszWijKLIzMgztyWmtnYz3dej1H57LPPLCqs2CmVSkRGRqJfv36Qy+VVXU6loMyU2VpRZnFkBsSZW+iZeXWaPv/8czOXQQghhIgHU6mgvnIJNW/9C3WdWmCBbcBJpVVdFilDuR7YSwghhBDTKKMjUfjfb8DSn6A5AOWRv6By94TNtNmQdQ2p6vJIKcr1wN64uDjMnTsXAwcO1Fm3KSkpCTt37qR1miqRTCa+/i9lFgfKLA5iyayMjkTBovlg6brL9rD0JyhYNB/K6MgqqqzyCPlc8xoIDgBz587FqlWrdAZ+awZ5JyYmon79+li1ahVmzpxpvmqrmKUOBCeEEFK51Go10tPTTXyRCvYzJoJ79hSGRgUzAMzNHXnf/wpIjLtV5+7uDomkXNc/CCp49lxYWBgmTpyI1157DUuWLMH27duxfPlynZlxnTp1gr29PU6cOMEvgQWy1E6T5h+vmP7xUGbKbK0oszAyP3nyBDVr1jTpNV2rO+FQ6yZl7tf34nVEZ2YbdczHjx/D09PTpDqqkqWe6wp9YO/atWvRpEkT7NmzB82aNYONjY3ePo0bN8bt27f5HJ6YSKVSITY2VtDLOZiKMosDZRYHsWSuZWvcbDFj96tsjDG8ePGC91fO8+dIiziGf9b+gKyYU3iRlcXrODxvkJkFrxuL165dw+TJk0u9L1mzZs0yH7ViSE5ODlasWIGzZ8/i3LlzyMjIQFhYGMaNG6ez37p167BlyxbcuHEDmZmZ8Pb2RnBwMBYuXAh/f3+9465fvx4rV65EQkICfHx8MGPGDEyfPt3k+gghhBB3d3c8fvzYpNdIrl0FvvyozP3W/r4Ta15pbnQdlSU3NxeOjo68XjvQowa+buiLOna2aA8Afx/D/fwCzL2VjH1pGSYdKycnB9WqVeNVR3nx6jTJZDIUFhaWus/Dhw95/XDT09OxePFi+Pr6okWLFoiKijK4X1xcHOrWrYuBAweiRo0aSEhIwLp163DgwAFcvnwZ3t7e2n1//vlnvPvuuxg2bBhmz56N6OhozJgxA7m5uZg3b57JNRJCCBE3iURi8m0x5haMPHdPvUHgRXEeNeHWNdiqlh8Y6FEDW5rrP4vW29YGW5rXx+ird0zuOFUVXp2m5s2bIyIiAiqVClIDJzY3NxfHjx9H69atTT62l5cXUlNTUatWLVy4cAFt27Y1uN/atWv12gYPHow2bdpg06ZNmD9/PgAgLy8PH3/8Mfr374/du3cDACZPngy1Wo0vvvgC//nPf1CjRg2T67QkHMdZ3OqqFY0yiwNlFgexZOakUthMm42CRfNL3Mdm6vsW22FycHBATk6OaS9SqYDJo4CnaXqbJP9/vreFdgJ+2QYYmdvBwcG0GsyI15imCRMm4NatW3j33XdRUFCgsy0rKwvjxo3Do0ePMHnyZJOPbWtri1q1avEpS3tbLjMzU9sWGRmJp0+fYurUqTr7Tps2DS9evMDBgwd5fS9LIpPJEBoaKuhpnKaizOJAmcVBTJllXUNgu3A5OHfdq1ScR03YLlxu0es0cRyHatWqmfRld++2wQ6TjvQ02N27bfQxq7JzzesdOmHCBBw/fhzr16/Hjh07UL16dQBAu3btcP36dbx48QLjxo3D66+/bs5aDXr69ClUKhWSk5OxePFiAED37t212+Pi4gAA
bdq00Xld69atIZFIEBcXh9GjRxs8dkFBgU6nMCsrCwCgUCigUCgAvLxEK5VKoVKpoFartftq2pVKpc6gNalUColEUmK75rgaml8iSqWyxHa1Wo0HDx6gdu3asLW1hVqt1hlQyXEcZDJZie0l1V6VmYqSy+V6tTPGkJqaitq1a+t8TyFnKus8KRQKpKSkoHbt2pBIJFaRqazzpHlv+/j4QC6XW0Wm4u3Fa+c4Dg8ePICXl5fOh4OQM5V1ngoKCrS/wyQSiVVkKvU8degCWduOUF2NR+a9u3Br0BB4pTmYVKp9neAyldCuSjNu3JfqaRrURY5f2ZmKZysJ7279tm3bEBISgjVr1uCff/4BYwwXLlxAkyZNMGPGDLzzzjt8D22S2rVrazs2bm5u+OGHH9CzZ0/t9tTUVEilUr17zzY2NnBzc8PDhw9LPPayZcuwaNEivfajR49qLw/6+vqiZcuWuHLlCpKTk7X7NGrUCI0bN8a5c+eQlva/XnZgYCD8/Pxw6tQpZGf/b0ppx44d4enpiaNHj+q8AUNCQmBvb4/w8HCdGvr164e8vDxERv5vIbTr16+jf//+SE9PR2xsrLbdyckJoaGhSElJQXx8vLbdw8MDnTp1wu3bt3Hz5k1tuyVlkslkBjM5OjoiJycHKpUKV69etYpMZZ2nW7du4c6dO9q81pDJ2POUl5eHpk2bWlWmks5T8+bNcfXqVdy5c0fnVoiQM5V1no4fPw6lUql9b1tDJmPPE+wc0dU/ANFHjlhNpqLnqcb9JBgzUCddqcb5Isep7Ey5ublGVFmOxS2LysvLQ0ZGBpydnXmPrDdEM6bJ0Ow5jcjISOTn5+P69evYsmUL3njjDe14JgCYOHEitm/fbvAH4uvri1atWmHv3r0Gj23oSpOPjw/S09O16zhYQk9eoVDg2LFj6NmzJxwcHKzir5OiDP3FpVQqcfToUfTp00dnrQ8hZyrrPOXn5+PIkSPo2bMn5HK5VWQq6zxp3tu9e/eGnZ2dVWQq3l68drVajcOHD6NXr146t6uEnKms85Sbm6v9HSaXy60ikzHnSfP+7tu3r94tJ0vMVFhYqPNZatR5UqkgmzoGeJpe4oKenLsH1D9tQdEFJ0rL5OTkpL0SXd5MmvOUlZUFd3f3MtdpMssNZHt7e9jb25vjUCYLCXl5/7dv374YNGgQmjVrBkdHR7z33nva2kqa6Zefn19q3ba2trC1tdVrl8vlek9nlkqlBgfFl3SPvqT2kp76bEy75v9rbt0UV1J7SbVbQiaN0jIZ2l/omUo6T5pjFT2e0DMZc540/9+aMmkUr13zS1wmkxmsR4iZymrX1Fj8vW0NmYxp5zhOEJkUCgWviVOa2XMM/xv8DQDq/+/EvBURg30mHFez5IC533vGsJzlOM0gICAALVu2xNatW7VtXl5eUKlUemtGFRYW4unTpzpLEwgVx3Hw8PCw+pknRVFmcaDM4iDGzIB4cu9Ly8Doq3fwsED3AsaDgkJBLTcAmOlKkyXJy8vTuaUWGBgI4OWtvn79+mnbL1y4ALVard0uZDKZDJ06darqMioVZRYHyiwOYswMCC83ryUHimBKJfLiLoDLfAbbml7wafoqtvFYXkFwSw5UNaVSiYwM/Z7puXPncPXqVZ2ZcqGhoXB1dcWPP/6os++PP/4IBwcH9O/fv8LrrWgqlQo3btyw+kcQFEWZxYEyi4MYMwPCy81nyYGiX44uLqjRNRhpjZvDvl1HVHN25nUcwS05UNHWrFmDzMxM7cy2/fv34/79+wCA6dOngzEGHx8fjBgxAk2bNkW1atVw9epVhIWFwcXFBZ9++qn2WPb29vjiiy8wbdo0DB8+HL1790Z0dDS2bNmCJUuWwNXVtUoympNarcbNmzcREBBg8B6vNaLMlNlaUWZxZAbEmVvomS2y07Ry5UokJSVp//uPP/7AH3/8AQAYPXo0vL29MWnSJERGRmL37t3Iy8uDt7c33nzzTXzyySd6z56bOnUq5HI5Vq1ahX379sHHxwfffvstZs6cWZmxCCGEECJgvDpNmzZtQs2aNdG7d29z1wMASExMLHOf7777zqRjTp48mdcK5YQQQgghAM8xTRMnTsThw4fNXQvhSSKRwNfX1+AUVGtFmcWBMouDGDMD4swt9My8Frf09fXFoEGDsHr16oqoyWJlZWXBxcWlzMWvCCGEECIcxn6+8+rqDRw4EMeOHdN7WC+pGiqVCnFxcYKZgWEOlFkcKLM4iDEzIM7cQs/Mq9O0ZMkSVKtWDUOHDsW///5r7pqIidRqNZKTk3WWlLd2lFkcKLM4iDEzIM7cQs/MayB4y5YtUVBQgPj4eBw+fBh2dnbw9PTUWzuB4zjcvXvXLIUSQgghhFQlXp0mtVoNGxsb+Pr66rQXHx5lhmcBE0IIIYRYBF6dJmOWBCCVRyKRoFGjRoKdjcAHZRYHyiwOYswMiDO30DPzmj0nVjR7jhBCCLE+FTp7rqhr167hjz/+wObNm8t7KMKTUqlETEwMlEplVZdSaSizOFBmcRBjZkCcuYWemXen6fz58wgMDETz5s0xfPhwjBs3Trvt1KlTcHBwwL59+8xRIykDYwxpaWmiGkNGmcWBMouDGDMD4swt9My8Ok3//vsvQkNDkZCQgPfffx99+/bV2d61a1e4u7tj165dZimSEEIIIaSq8eo0LVy4EABw8eJFrFy5Em3bttXZznEcOnbsiPPnz5e/QkIIIYQQC8Cr03Ty5EkMGzYM9evXL3EfX19fpKam8i6MGE8qlSIwMBBSqbSqS6k0lFkcKLM4iDEzIM7cQs/Ma8mB7OxseHp6lrpPXl6eYJdJFxqJRAI/P7+qLqNSUWZxoMziIMbMgDhzCz0zrytNPj4+uHr1aqn7XLp0CQEBAbyKIqZRKpWIiIgQ7GwEPiizOFBmcRBjZkCcuYWemVenacCAATh69CiOHz9ucPvOnTtx5swZDB48uDy1ESMxxpCdnS3Y2Qh8UGZxoMziIMbMgDhzCz0zr9tzH330EXbv3o1+/fph7NixePToEQBg7dq1iI2Nxfbt2+Hv74/Zs2ebtVhCCCGEkKrCq9Pk4eGBkydP4u2338b69eu17e+99x4AoH379ti+fTtcXFzMUyUhhBBCSBUr92NU4uPjcebMGTx79gzOzs5o37693hIE1sJSH6OiVquRnp4Od3d3wT7Px1SUmTJbK8osjsyAOHNbamZjP9/p2XMmsNROEyGEEEL4q7Rnzz19+hQRERH4888/ERERgadPn5b3kMRECoUCBw8ehEKhqOpSKg1lFgfKLA5izAyIM7fQM/Ma0wQAiYmJmDlzJg4ePKgzCp7jOAwYMADfffcd/P39zVEjMYJQp2+WB2UWB8osDmLMDIgzt5Az8+o03b17F507d8aTJ0/QoEEDdO7cGTVr1sTjx48RExODffv24cyZM4iJiUG9evXMXTMhhBBCSKXj1WmaN28e0tLS8NNPP2Hy5MngOE67jTGGX375BVOnTsW8efPoob2EEEIIsQq8BoLXqFEDwcHB+PPPP0vcZ9CgQTh16hQ
yMjLKVaAlsdSB4JrFwpycnHQ6sNaMMlNma0WZxZEZEGduS81coQPBVSoVmjZtWuo+zZo1o2fPVSJ7e/uqLqHSUWZxoMziIMbMgDhzCzkzr05Tq1at8O+//5a6z7///os2bdrwKoqYRqlUIjw8XNCD60xFmcWBMouDGDMD4swt9My8Ok1LlizBoUOH8Ouvvxrc/ssvv+DIkSP48ssvy1UcIYQQQoil4DUQ/MSJEwgJCcE777yDVatW6cyeO336NG7duoXevXvj+PHjOg/15TgOn376qdmKJ4QQQgipLLw6TZ9//rn2/9+8eRM3b97U2+fw4cM4fPiwTht1mgghhBAiVLxmz508eZL3NwwKCuL92qpmybPnlEolZDKZRc1GqEiUmTJbK8osjsyAOHNbamZjP995XWkScsfHWuXl5cHJyamqy6hUlFkcKLM4iDEzIM7cQs5sOY8YJrwplUpERkYKdjYCH5RZHCizOIgxMyDO3ELPTJ0mQgghhBAjUKeJEEIIIcQI1GmyEjIZr+FpgkaZxYEyi4MYMwPizC3kzLxmz4mVpc6eI4QQQgh/FfrsOWJZ1Go1njx5ArVaXdWlVBrKLA6UWRzEmBkQZ26hZ6ZOkxVQqVSIjY0V1QOSKbM4UGZxEGNmQJy5hZ6ZOk2EEEIIIUYwajSWRCLhtXInx3GCXYuBEEIIIaQoo640devWTe+refPmYIxBIpHAz88P7dq1g5+fHyQSCRhjaN68Obp27WpyQTk5OVi4cCH69OkDV1dXcByHDRs26OyjVquxYcMGDBw4ED4+PqhWrRqaNWuGL7/8Evn5+QaPu379ejRp0gR2dnZo0KABVq9ebXJtlorjODg5OVnUkvQVjTKLA2UWBzFmBsSZW+iZec2eu3//Pjp37oyuXbti6dKl8PX11W5LTk7GggULcPr0afz999+oU6eOScdOTExE3bp14evri3r16iEqKgphYWEYN26cdp+cnBw4OTmhQ4cOGDBgADw9PREbG4uNGzeiW7duiIiI0DkhP//8M959910MGzYMvXv3RnR0NDZv3ozly5dj3rx5RtdGs+cIIYQQ62P05zvjYcSIEax9+/al7tO+fXs2cuRIk4+dn5/PUlNTGWOMnT9/ngFgYWFhOvsUFBSw06dP67120aJFDAA7duyYti03N5e5ubmx/v376+z71ltvsWrVqrFnz54ZXdvz588ZAPb8+XMTElU8lUrFEhMTmUqlqupSKg1lFgfKLA5izMyYOHNbamZjP995DQQ/fvw4unfvXuo+oaGhOH78uMnHtrW1Ra1atUrdx8bGBp06ddJrHzJkCADg+vXr2rbIyEg8ffoUU6dO1dl32rRpePHiBQ4ePGhyjZZGpVIhPj5esLMR+KDM4kCZxUGMmQFx5hZ6Zl7Lcubn5yM1NbXUfR4+fIi8vDxeRfH16NEjAIC7u7u2LS4uDgDQpk0bnX1bt24NiUSCuLg4jB492uDxCgoKUFBQoP3vrKwsAIBCoYBCoQDwcpC8VCqFSqXSWXdC065UKsGK3AGVSqWQSCQltmuOq6FZObX4gPqi7ZrXKBQKyOVyqNVqnTckx3GQyWQltpdUe1VmKspQJs0+arVa5/hCzmTMeQJgUe89c2Qq7TxpalKpVJDL5VaRqXh78do1+xSvRciZjD1Pmv+1pkyl1a7ZhzGmt79QM5XWrslUNLulZCqerSS8Ok2tW7fG77//jsmTJ6Njx45622NiYrBjxw506NCBz+F5+/rrr+Hs7Iy+fftq21JTUyGVSuHp6amzr42NDdzc3PDw4cMSj7ds2TIsWrRIr/3o0aNwcHAAAPj6+qJly5a4cuUKkpOTtfs0atQIjRs3xrlz55CWlqZtDwwMhJ+fH06dOoXs7Gxte8eOHeHp6YmjR4/qvNFCQkJgb2+P8PBwnRr69euHvLw8REZGatsiIyPRv39/pKenIzY2Vtvu5OSE0NBQpKSkID4+Xtvu4eGBTp064fbt27h586a23ZIyyWQyg5kcHR0BAA8ePMDVq1etIlNZ5+nu3bsAgGPHjllNJmPP0927d9G0aVOrylTSeWrevDmAl79Hc3JyrCJTWedJU6PmvW0NmYw9T8DLcbrR0dFWk6m08/T06VMA/zvXlpIpNzcXxuA1EPzvv/9G9+7doVKp8Nprr6FLly7w9PTEkydPEB0djQMHDkAmk+HEiRPo3LmzqYfXunDhAtq2bas3ENyQpUuX4uOPP8batWsxZcoUbfvEiROxfft2gz8QX19ftGrVCnv37jV4TENXmnx8fJCenq4dKGYJPXmlUomLFy+idevWsLe3t9q/TorWrlKpcOnSJbRp00Zn0L+QM5V1ngoKCnD+/Hm0bt0aMpnMKjKVdZ407+22bdvC1tbWKjIVby9eO2MMFy5cQKtWrSCVSq0iU1nnKS8vT/s7TCaTWUUmY86TUqnEpUuX0K5dOxQn1EyltcvlchQWFuLcuXPac20pmbKysuDu7l4xA8EZY+z48eOsXr16jOM4xnEck0gk2v9fr149dvz4cb6H1ippIHhxv//+O+M4jk2cOFFv27Rp05hUKjX4Og8PD5MGq1vqQHBCCCGE8Gfs5zvvRw13794dd+7cwd9//43Lly/j+fPncHFxQYsWLdClS5dKW4Ph2LFjGDNmDPr374+ffvpJb7uXlxdUKhWePHmic4uusLAQT58+hbe3d6XUWZFUKhVu376NBg0a6Pxlas0oM2W2VpRZHJkBceYWeuZyPUaF4zh07doV7733Hj7++GO899576Nq1a6V1mM6ePYshQ4agTZs22Llzp/aSYFGBgYEAXt7qK+rChQtQq9Xa7UKmVqtx8+ZNwT4AkQ/KLA6UWRzEmBkQZ26hZy73s+euXbuGP/74A5s3bzZHPUa7fv06+vfvD39/fxw4cAD29vYG9wsNDYWrqyt+/PFHnfYff/wRDg4O6N+/f2WUSwghhBCB43177vz585g8ebLOzKW3334bAHDq1Cn06dMHv//+OwYOHGjysdesWYPMzEztzLb9+/fj/v37AIDp06dDIpGgd+/eyMjIwIcffqi31lJAQIB2Vp+9vT2++OILTJs2DcOHD9euCL5lyxYsWbIErq6uvPITQgghRFx4dZr+/fdfhIaGQiKR4P3338eNGzdw6NAh7fauXbvC3d0du3bt4tVpWrlyJZKSkrT//ccff+CPP/4AAO2aSikpKQCA+fPn671+7NixOkshTJ06FXK5HKtWrcK+ffvg4+ODb7/9FjNnzjS5NkskkUjg6+sLiaTcFw4FgzKLA2UWBzFmBsSZW+iZeS058Prrr+PIkSOIi4tD/fr1sWjRIixevFhnuuCIESNw+fJl3Lhxw6wFVyV69hwhhBBifYz9fOfV1Tt58iSGDRuG+vXrl7iPr69vmauGE/NQqVSIi4sT7LL0fFBmcaDM4iDGzIA4cws9M69OU3Z2tt4K28Xl5eUJ9ociNGq1GsnJyYKdjcAHZRYHyiwOYswMiDO30DPz6jT5+PjoDAA35NKlSwgICOBVFCGEEEKIpeHVaRowYACOHj2K48ePG9
y+c+dOnDlzBoMHDy5PbYQQQgghFoPX7LmPPvoIu3fvRr9+/TB27Fg8evQIALB27VrExsZi+/bt8Pf3x+zZs81aLDFMIpGgUaNGgp2NwAdlFgfKLA5izAyIM7fQM/OaPQcA9+7dw9tvv63zBGaN9u3baztO1oRmzxFCCCHWp0JnzwFAvXr1cPr0aVy6dAlr167Fl19+iR9++AFnz55FbGys1XWYLJlSqURMTIzeE6WtGWUWB8osDmLMDIgzt9Az814RXCMwMNAqnt8mZIwxpKWlgedFQ0GizOJAmcVBjJkBceYWemZh3lQkhBBCCKlkvK80ZWdnY/369bh8+TIePnwIhUKhtw/HcThx4kS5CiSEEEIIsQS8Ok3nz59H3759kZGRUeolNo7jeBdGjCeVShEYGAipVFrVpVQayiwOlFkcxJgZEGduoWfmNXuuU6dOOHfuHJYtW4Y333wTXl5egv0BmIJmzxFCCCHWp0Jnz8XFxWHkyJH48MMPUadOHVF0mCyZUqlERESEYGcj8EGZxYEyi4MYMwPizC30zLw6Ta6urvDw8DB3LYQnxhiys7MFOxuBD8osDpRZHMSYGRBnbqFn5tVpGjx4MCIiIgT7wD1CCCGEEFPx6jQtW7YMcrkcb731Fh48eGDumgghhBBCLA7vx6hcunQJPXr0wPPnz1GjRg2DA6c4jsPdu3fLXaSlsNSB4Gq1Gunp6XB3dxfs83xMRZkps7WizOLIDIgzt6VmNvbznVen6cSJE3jttdeQn58PuVwOT09PyGSGVy9ISEgw9fAWy1I7TYQQQgjhr0Jnz82bNw+MMezYsQP5+flISUlBQkKCwS9S8RQKBQ4ePGhwgVFrRZnFgTKLgxgzA+LMLfTMvBa3vHbtGkaPHo3hw4ebux7Ck1Cnb5YHZRYHyiwOYswMiDO3kDPzutLk4eEBe3t7c9dCCCGEEGKxeHWa3nrrLRw6dAh5eXnmrocQQgghxCLxGgheWFiIESNG4NmzZ1i6dClatGgBR0fHiqjPoljqQHDNYmFOTk6ied4fZabM1ooyiyMzIM7clprZ2M93XmOaNLfmGGPo1q1biftxHCfoe5dCIsbbpZRZHCizOIgxMyDO3ELOzKvT1LVrV4vqIYqdUqlEeHg4+vXrB7lcXtXlVArKTJmtFWUWR2ZAnLmFnplXpykqKsrMZRBCCCGEWDbLWY6TEEIIIcSCUaeJEEIIIcQIvGbPhYaGGndwjsOJEydMLspSWfLsOaVSCZlMJpqxZpSZMlsryiyOzIA4c1tq5gqdPVfWmCaO48AYs6gfiLXLy8uDk5NTVZdRqSizOFBmcRBjZkCcuYWcmdftObVabfArMzMTERERaN++PV5//XUUFhaau15igFKpRGRkpKiWd6DM4kCZxUGMmQFx5hZ6ZrOOaXJ2dkZwcDCOHDmCc+fOYcmSJeY8PCGEEEJIlamQgeBOTk7o27cvwsLCKuLwhBBCCCGVrsJmz0kkEqSmplbU4UkxMhmv4WmCRpnFgTKLgxgzA+LMLeTMvGbPleXevXvo0KEDXF1dcePGDXMfvspY6uw5QgghhPBXobPnJkyYYLBdqVTiwYMH+Pvvv6FQKLB48WI+hycmUqvVSE9Ph7u7OyQScSy9RZkps7WizOLIDIgzt9Az8+o0bdiwodTtjRo1wpw5czBp0iQ+hycmUqlUiI2NRb9+/QT5JuRDbJmZSgVF/AXcizgBl9DusAlsA04qreqyKpzYzjNAmcWSGRBnbqFn5tVpSkhIMNgukUhQvXp1wa6/QEhFYowhNzfX9BfGngJb919wT9PQHIDyyF9QunkAk6cBHbuZfDgHBwdaQ40QQnjg1Wny8/Mzdx2EWL3c3Fw4Ojqa9JqBHjWwpXl9ANDp6KjTnwDLFmL01TvYl5Zh0jFzcnJQrVo1k15DCCHEzLPnsrKycOzYMURHR6MCxpeTEnAcBycnJ1FdPRBDZgmArxv6vvz/xXJq/vurhr5W/QBJMZzn4iizeIgxt9Az85o9t27dOmzZsgV79+5FjRo1AACXL19G37598fjxYwBAx44dcfToUTg4OJi34ipEs+dIeZh8e+5qPPDJ7LL3+/IboHmg0Yel23OEEKLL2M93Xn+kbt68GQUFBdoOEwDMmTMHT548wfjx49GvXz/Exsbixx9/NPnYOTk5WLhwIfr06QNXV1dwHGdw4Pm5c+cwdepUtG7dGnK5vMwPgfXr16NJkyaws7NDgwYNsHr1apNrs1RqtRpJSUlQq9VVXUqlEUXmjKfm3U+ARHGei6HM4iHG3ELPzKvTdOvWLbRo0UL730+fPkVkZCQmTZqEX3/9Ffv370fbtm2xdetWk4+dnp6OxYsX4/r16zrfo7jw8HD8+uuv4DgO9erVK/WYP//8MyZNmoSmTZti9erV6NixI2bMmIGvvvrK5PoskUqlQnx8PFQqVVWXUmmEmFkzpsnYr76jRht13L6jRpt0XF6D0auIEM9zeVFm8RBjbqFn5tVpyszMhIeHh/a/o6OjAQBDhw7VtnXp0gWJiYkmH9vLywupqalISkrCihUrStxvypQpeP78OS5cuICePXuWuF9eXh4+/vhj9O/fH7t378bkyZOxadMmvPXWW/jiiy+QkWHaIFpCKsvpzGzczy+AuoQ76GrGkJJfgNOZ2ZVcGSGEiBOv2XNubm46j0g5ceIEpFIpOnfurG1jjEGhUJh8bFtbW9SqVavM/WrWrGnU8SIjI/H06VNMnTpVp33atGnYunUrDh48iNGjjfuLnpDycHBwQE5Ojmkvij0FLP8cDEDxG9ASjoPP58uQZeKyA9Y0zpAQQioTr07Tq6++ir/++gvvv/8+7OzssG3bNnTu3FlnGnNiYiK8vLzMVihfcXFxAIA2bdrotLdu3RoSiQRxcXEldpoKCgpQUFCg/e+srCwAgEKh0HYIJRIJpFIpVCqVzj1aTbtSqdSZSSiVSiGRSEpsL97R1DyjR6lUltiuVCrh5uYGpVIJuVwOtVqtc+mT4zjIZLIS20uqvSozFWUok0qlgoeHh17n3NIz2djYlJjJ4HkK6gG1TA7VT9+DpT/534E9PCF/dxZsgrpXeSaNinjvad7bmn2sIVPx9uK1M8bg4eGhd/tCyJnKOk9Ff4dZSyZjzpNSqYS7uzsA6O0v1EyltcvlcjDGdM61pWQy9iIPr07T3Llz0aNHD50xR7Nn/2+Wj1qtxt9//43Q0FA+hzer1NRUSKVSeHp66rTb2NjAzc0NDx8+LPG1y5Ytw6JFi/Tai84K9PX1RcuWLXHlyhUkJydr92nUqBEaN26Mc+fOIS0tTdseGBgIPz8/nDp1CtnZ/7ut0rFjR3h6euLo0aM6b7SQkBDY29sjPDxcp4Z+/fohLy8PkZGR2raIiAj0798f6enpiI2N1bY7OTkhNDQUKSkpiI+P17Z7eHigU6dOuH37Nm7evKltt6RMMpms1ExJSUlWl8nQeeq4dS8SD+7D45vXUejgiAxvH/g6u6IlINhMppynhIQEq8tU2nnq1KkTIiIirCpTaecpIiICSqUSR48etZpM1
niezJEpMzMTT58+1Z5rS8lk7FhP3g/sPXjwIMLCwgAAI0eOxOuvv67dFh0djRkzZuCjjz7C8OHD+RweAHDhwgW0bdsWYWFhGDduXIn7vffee/jvf/9rcG2oiRMnYvv27QZ/IL6+vmjVqhX27t1r8LiGrjT5+PggPT1dOyXREnryKpUKd+/eRUBAAOzs7Kzyr5PitavVaiQkJCAgIEBnXyFnKus8FRYW4vbt2wgICNDWIfRMZZ0nzXu7QYMGsLGxsYpMxduL1w4Ad+/eRd26dXUeMyHkTGWdp/z8fO3vMKlUahWZjDlPKpUKCQkJaNCggd57QKiZSmuXy+VQKBS4deuW9lxbSqasrCy4u7tXzAN7AaB///7o37+/wW1du3bV3haravb29igsLDS4LT8/H/b29iW+1tbWFra2tnrtcrkccrlcp00qlUJq4FlgmjePse3Fj2ts+507d9CwYUMAL988hp7pU1J7SbVXdaaiiteuUChw8+ZNBAQEGNxfiJnKauc4Tnueix5PyJmMOU9F39vWkqmo4rWX9d4WYqay2qVSqcH3tpAzGVv7rVu3UL9+favKVFa7oXNd1ZlKqrU4a15MGMDL2XgqlQpPnjzRaS8sLMTTp0/h7e1dRZURQgghREh4X2kCXi4wef78eWRmZhpcc4HjOHz66afl+RblFhgYCODlrb5+/fpp2y9cuAC1Wq3dTgghhBBSGl6dpmfPnmHw4ME4ffp0qc+Ys4ROU2hoKFxdXfHjjz/qdJp+/PFHODg4lHiLUUgkEgl8fX0NXtq0VpRZHCizOIgxMyDO3ELPzKvTNHv2bPz9998IDg7G2LFjUadOnRLvF/KxZs0aZGZmame27d+/H/fv3wcATJ8+HS4uLkhKSsLmzZsBvLxqBABffvklAMDPzw9vv/02gJdjmr744gtMmzYNw4cPR+/evREdHY0tW7ZgyZIlcHV1NVvdVUUqlaJly5ZVXUalosziQJnFQYyZAXHmFnxmxoObmxtr3749U6vVfF5eJj8/PwbA4FdCQgJjjLHIyMgS9wkKCtI75i+//MIaNWrEbGxsWEBAAPv2229Nrv/58+cMAHv+/LkZUpqPUqlkly5dYkqlsqpLqTSUWRwosziIMTNj4sxtqZmN/XzndX0sLy8P3bp1q7AnpScmJoIxZvDL398fABAcHFziPlFRUXrHnDx5Mm7cuIGCggLcuXMHs2bNsponvavVaiQnJwv2AYh8UGZxoMziIMbMgDhzCz0zr05TYGAgr+fKEUIIIYQIFa9O08KFC7Fv3z6cOXPG3PUQQgghhFgkXqO3Hz16hP79+yMoKAhvvfUWWrVqVeIKmmPGjClXgaRsEokEjRo1EuxsBD4oszhQZnEQY2ZAnLmFnpnXY1QkEgk4jtNZbqD4+CDGGDiOM7h+k1BlZWXBxcWlzGXWCSGEECIcxn6+87rSpHnmHLEMSqUS586dQ7t27cy69IMlo8yU2VpRZnFkBsSZW+iZeVU8duxYc9dByoExhrS0tFIXGrU2lFkcKLM4iDEzIM7cQs8szJuKhBBCCCGVrFzXxhITE7F161bEx8cjKysLzs7OCAwMxFtvvaVdT4kQQgghxBrwGggOAN9//z3mzp0LpVKpd5lNLpfj66+/xsyZM81SpKWw1IHgarUaKSkp8PHxEeyMBFNRZspsrSizODID4sxtqZmN/Xzn1Wk6cOAABg4cCHd3d7z//vsICQmBl5cXHj16hMjISHzzzTd4+vQp9u3bZxUPxNWw1E4TIYQQQvgz9vOdVzfvm2++gaurKy5duoQFCxagQ4cO8PPzQ/v27TF//nxcvHgRNWrUwDfffMM7ADGeUqlEREQElEplVZdSaSizOFBmcRBjZkCcuYWemVen6dKlSxgxYgTq1KljcLuPjw/eeOMNXLx4sVzFEeMwxpCdnS3Y2Qh8UGZxoMziIMbMgDhzCz0zr05TYWEhqlWrVuo+jo6OKCws5FUUIYQQQoil4dVpatiwIfbv31/i5TWlUokDBw6gYcOG5SqOEEIIIcRS8Oo0jRkzBjdv3kTv3r31bsFduHABffv2xc2bN2kRzEoilUrRsWNHSKXSqi6l0lBmcaDM4iDGzIA4cws9M6/ZcyqVCsOGDcO+ffvAcRwcHBzg6emJJ0+eIDc3F4wxDBo0CHv27LGoKYXlRbPnCCGEEOtTobPnpFIp9u7diw0bNiA4OBg2NjZITk6GjY0NQkJCsHHjRvz5559W1WGyZAqFAgcPHoRCoajqUioNZRYHyiwOYswMiDO30DOXa0XwMWPGYMyYMeaqhZSDUKdvlgdlFgfKLA5izAyIM7eQM9OlIEIIIYQQI/DqNB04cABDhw7Fw4cPDW5/+PAhhg4dikOHDpWrOEIIIYQQS8FrIHjfvn3x8OFDXL58ucR9WrZsidq1a+PAgQPlKtCSWOpAcM1iYU5OTuA4rqrLqRSUmTJbK8osjsyAOHNbauYKHQh++fJltG/fvtR92rdvj/j4eD6HJzzY29tXdQmVjjKLA2UWBzFmBsSZW8iZeXWanj17Bk9Pz1L3cXd3R3p6Oq+iiGmUSiXCw8MFPbjOVJRZHCizOIgxMyDO3ELPzKvT5OHhgZs3b5a6z82bN+Hq6sqrKEIIIYQQS8Or09StWzfs378fV65cMbj98uXL2LdvH4KCgspVHCGEEEKIpeDVaZo3bx4AoEuXLli8eDFiY2ORnJyM2NhYLFq0CF27doVEIsGCBQvMWiwhhBBCSFXhNXsOAPbs2YOxY8ciLy9Pp50xBkdHR2zatAmDBw82R40Ww5JnzymVSshkMouajVCRKDNltlaUWRyZAXHmttTMxn6+814RfNiwYejatSs2bNiA8+fP4/nz56hevTratWuHsWPHwsPDg++hCQ95eXlwcnKq6jIqFWUWB8osDmLMDIgzt5Azl2tFcE9PT8ydOxe7du3C0aNHsXPnTnzwwQfUYapkSqUSkZGRgp2NwAdlFgfKLA5izAyIM7fQM9NjVAghhBBCjECdJkIIIYQQI1CnyUrIZLyHpwkWZRYHyiwOYswMiDO3kDPznj0nRpY6e44QQggh/FXos+eIZVGr1Xjy5AnUanVVl1JpKLM4UGZxEGNmQJy5hZ6ZOk1WQKVSITY2FiqVqqpLqTSUWRwosziIMTMgztxCz8yr0/Tw4UNz10EIIYQQYtF4dZr8/f0xaNAgHDhwQLCX2AghhBBCTMGr09ShQwfs378fgwYNgq+vLz777DMkJiaauTRiLI7j4OTkZFFL0lc0yiwOlNn6MZUK7Eoc/JPvgl2JAxPobRs+xHauAeFn5j177tatW1i3bh02b96MJ0+eQCKRoEePHpg8eTIGDRok6CmFJaHZc4QQoo8xhtzcXNNfGHsKbN1/wT1N+1+bmwcweRrQsZvJh3NwcBDshzGpWsZ+vpd7yQGlUom//voLv/76K44dOwbGGNzd3TFu3DhMnDgRDRs2LM/hLYqldprUajVSUlLg4+MDiUQcY/spM2W2VkLM/OLFCzg6Opr0moEeNbCleX0AgKRIR0f9/x9Jo6/ewb60
DJOOmZOTg2rVqpn0mqokxHNdXpaaudKWHJDJZBg2bBgOHTqExMRELFy4EBKJBCtXrkSTJk0QEhKCnTt3gpaDqjgqlQrx8fGCnY3AB2UWB8psnSQAvm7o+/L/F7sypPnvrxr6Wv30bjGc6+KEntls70m1Wo2LFy/i/PnzSEtLA2MMPj4+OH36NN588020aNECt2/fNupYOTk5WLhwIfr06QNXV1dwHIcNGzYY3Pf69evo06cPHB0d4erqirfffhtpaWl6+6nVanz99deoW7cu7Ozs8Oqrr2L79u3liUwIIQSAvb09Hj9+bPxX5HHUsbPV6zBpSDgOPna2eBx53KTj2tvbV3JyIjblHnh07949/Prrr9i4cSMePXqkvfL07rvvIiQkBI8ePcK3336Lb7/9FlOmTMHx48fLPGZ6ejoWL14MX19ftGjRAlFRUQb3u3//Prp16wYXFxcsXboUOTk5WLlyJa5evYpz587BxsZGu+/HH3+M5cuXY/LkyWjbti3++usvjBo1ChzHYeTIkeX9MRBCiGjl5eWhZs2aRu8/vKYrwprVL3O/qSPfwK7Hz4w+rtBuzxHh4dVpUigU2LNnD9atW4eTJ09CrVajbt26WLp0KcaPHw9PT0/tvrVq1cJXX32FrKwsbNq0yajje3l5ITU1FbVq1cKFCxfQtm1bg/stXboUL168wMWLF+Hr+/JSb7t27dCzZ09s2LAB//nPfwAADx48wKpVqzBt2jSsWbMGADBp0iQEBQXhww8/xPDhwyGVSvn8KCwCx3Hw8PAQ1QBIyiwOlNk6PSpQmHU/oRLDuS5O6Jl5dZq8vb3x7NkzSKVSDBo0CO+88w569epV6mv8/PyQl5dn1PFtbW1Rq1atMvfbs2cPBgwYoO0wAUCPHj3QsGFD7Ny5U9tp+uuvv6BQKDB16lTtfhzHYcqUKRg1ahRiY2PRpUsXo2qzRDKZDJ06darqMioVZRYHyiwMDg4OyMnJMf4FKhUweRTwVH8ohZa7Bw4lHQNM+IPWwcHB+BosgBDPdXkJPTOvTpODgwNmzpyJiRMnwsvLy6jXTJ06FW+++Safb2fQgwcP8OTJE7Rp00ZvW7t27RAeHq7977i4OFSrVg1NmjTR20+z3VCnqaCgAAUFBdr/zsrKAvDySptC8fIvIIlEAqlUCpVKpbPQp6ZdqVTqDIKXSqWQSCQltmuOq6FZukGpVJbYrlKpcPfuXQQEBMDOzg5qtVpnkB3HcZDJZCW2l1R7VWYqSi6X69WuVquRkJCAgIAAnX2FnKms81RYWIjbt28jICBAW4fQM5V1njTv7QYNGsDGxsYqMhVvL147ANy9exd169bVmV1k6ZlsbGxMeu+pp8yC8suPURKbqbOBYmOUyspUtF0Iv/dUKhUSEhLQoEEDvfdAVbz3KuPfk0KhwK1bt7S/xywlU/FsJeHVaUpMTDT50pqzs7NZp+mnpqYCgMFOm5eXF549e4aCggLY2toiNTUVNWvW1KtZ89qSHguzbNkyLFq0SK/96NGj2r9ofH190bJlS1y5cgXJycnafRo1aoTGjRvj3LlzOgPTAwMD4efnh1OnTiE7O1vb3rFjR3h6euLo0aM6b7SQkBDY29vrdAIBoF+/fsjLy0NkZKS2LTExEf3790d6ejpiY2O17U5OTggNDUVKSgri4+O17R4eHujUqRNu376NmzdvatstKZNMJjOYydHRETk5ObCxscHVq1etIpMx5+nOnTu4c+eOVWUy5jwBQNOmTa0qU0nnqXnz5rh58yYePHigc/VGyJlKOk9dZi2A/Zb1YOlPtG35js6QT50FdOwqyEymnicAqFmzJqKjo60mU2nnKS0tTef3mKVkMnadsXKv01TRNGOawsLCMG7cOG17dHQ0unXrhh07duCNN97Qec1nn32GL774AhkZGahevTq6d++O1NRUXLt2TWc/tVoNqVSKmTNn4rvvvtP73oauNPn4+CA9PV3bAbSEnrxCocCxY8fQs2dPODg4WOVfJ0VrZyoVlFcu4crJKLQIDoW0eSC4/7+EL9RMRWsvqT0/Px9HjhxBz549IZfLrSJTWedJ897u3bs37OzsrCJT8fbitavVahw+fBi9evXSWSRYyJlKO08cY8i9cBZXT0WhebdgyFu0gtzWVtCZjD1Pmvd337599f6oF2qm0trlcjkKCgpw+PBh7e8xS8mUlZUFd3f3Mtdp4nWlacKECWXuI5FI4OzsjEaNGmHAgAGoXbs2n29VIs3U0qKdGo38/Hydfezt7Y3arzhbW1vY/v8/3qLkcjnkcrlOm1QqNTiYvKSV0Yu2l7aabmFhYantjDE8f/4cmZmZKCgoKHE9LEP5i7YbWkm3PJmKKv6z0tRdPFtZWQHorCDcHID6yF9QF1tBuKSshmiuGJojk6ntEonE4OJuJbVrzkXx919FnidT2/lkMlR78XbN/7emTBrFa9f8EpfJZAbrEWKmstptWrXF40dpaN2qrU69Qs5kSu0cx1ldppLaNXUX/z1W1ZlKyqB3HKP2KmbDhg3aD1hDH9Icx+m0T58+HZ999hk++eQTPt/OIM2tNc1tuqJSU1Ph6uqq7fB4eXkhMjISjDGdjoHmtd7e3mari4/c3FyTV9PVkADoXN0JtWzlWD9rOk5nZoPPI5Qre6oun8xFVxAueh7V6U+AZQutfgVhiUQCX19fi1pFt6JRZnEQY2ZAnLmFnplX1Xfv3sWAAQPg6emJpUuX4uTJk7hx4wZOnjyJpUuXombNmhg4cCDOnj2LX375Bd7e3li4cCF27NhhtsJr164NDw8PXLhwQW/buXPnEBgYqP3vwMBA5Obm4vr16zr7nT17VrtdiAZ61MC1zi1wqHUThDWrj0Otm+Ba5xYY6FGjqkszO1pB+OVfVi1bthT08himosziIMbMgDhzCz0zr8+YHTt24OzZs4iPj8f8+fPRtWtXNGzYEF27dsX8+fNx6dIlnDlzBpGRkZg0aRJOnz4NR0dHrF271qzFDxs2DAcOHEBKSoq27cSJE7h16xaGDx+ubRs0aBDkcrnO92eM4aeffkLt2rWrfPqjZrquSV/HwrHt1Qaobad7+7COnS22vdoAOcfCTTpeZU/VNTVzVuzfRq0gnBX7t0XnLg+VSoW4uDjBPn6AD8osDmLMDIgzt9Az87o9t379erzxxhslrgBbq1YtDB8+HOvWrcPcuXNRu3ZtDBgwAAcPHjT6e6xZswaZmZnamW379+/H/fv3Aby83efi4oKPPvoIu3btQkhICGbOnImcnBysWLECzZs3x/jx47XHqlOnDmbNmoUVK1ZAoVCgbdu22Lt3L6Kjo7F169Yq7/FyHGfSLSKmUiHv17VgAEqaw8it/xH2Ib20A6QtjamZlbk5MGakkm1uDmQCud1mKrVajeTkZDRr1qzK37OVhTJTZmsmxtxCz8yr03T//n2DA6SLsrOz03ZygJfTBDUDr42xcuVKJCUlaf/7jz/+wB9//AEAGD16NFxcXODj44OTJ09i9uzZmD9/PmxsbNC/f3+sWrVKr77ly5ejRo0a+Pnnn7FhwwY
0aNAAW7ZswahRo4yuyVKor8brTNE1hKU9hvpqPKSBrSupqorFubqbdT9CCCHEVLw6TbVr18bevXvxxRdfwM7OTm97fn4+9u7dqzNj7smTJ6hRw/ixNomJiUbt17RpUxw5cqTM/SQSCRYsWIAFCxYYXYOlYs/SzbpfVShtxqBB9RoAbh5lriCcX68B8OKF0Yc1NGuQEEIIMYRXp2nixIn4+OOP0aVLF3z22Wfo3Lkz3Nzc8PTpU5w+fRqLFy/GvXv38MUXX2hfEx0djRYtWpitcGticgfCwbhZZwUOjigwsgNR2Z2H8s6eKzq2Sf3/MzVHR8Rgn4uLSccU2uy5Ro0aCXbWCR+UWRzEmBkQZ26hZ+a1uKVKpcL48eOxZcsW7QetRCLRLjTFGMOoUaOwadMmSCQSPH78GMuXL0efPn3Qu3dv8yaoRFlZWXBxcSlz8StTvXjxwqQOhATAtc4t4G1rY3BgtJoxPCgoRNPTl41efqCyOw+mZtYY6FEDXzf0RZ0iA+BT8gsw71ayycsNAMLqNBFCCKkYxn6+l2tF8IiICGzevBlXrlxBVlYWnJ2d0aJFC7z11lvo3r0738NaLEvpNAFGXHUxcc2iyu48mHx1rehrlUq8uHAWSXGX0KhDR8hebWnSQz2LEtLtOaVSiXPnzqFdu3YlLtRmbSgzZbZmYsxtqZmN/XznVfGpU6fg7OyM0NBQhIaG8i6SvGTyE8I1/n917KLjfCQensCkadj2/6tjm1JDZTJ19lxxtt1CcCYnDy3bdzJ6JVehY4whLS2txFXfrRFlFgcxZgbEmVvomXl1mkJCQvDOO++Yfd0lseLdgejRFyykFwrjLyAu4gRahnaHTWAbi11mgBBCCBEyXp0mT09Pg7PmSOXjpFJIXm2Fx/cfQfJqK+owEUIIIRWE1/D1nj17IioqSrCX16yNVCpFYGCgIBcK44syiwNlFgcxZgbEmVvomXkNBH/48CE6duyIXr164auvvoKrq2tF1GZxKmogOCGEEEKqjrGf77yuNI0ePRrVq1fHb7/9htq1a+OVV15BSEiIdmC45ssaZ9BZIqVSiYiICCiVyqoupdJQZnGgzOIgxsyAOHMLPTOvMU1RUVHa/19QUIAbN27gxo0bevsJZSq30DHGkJ2dLarbpZRZHCizOIgxMyDO3ELPzKvTpFnEkhBCCCFELIS5jjkhhBBCSCUr14rgwMuVpG/duoUXL16ga9eu5qrLIlnqQHC1Wo309HS4u7sL9nk+pqLMlNlaUWZxZAbEmdtSM1foQHAASExMxKBBg1CjRg20bdsWISEh2m2nT5/GK6+8ojP2iVQciUQCT09Pi3oDVjTKLA6UWRzEmBkQZ26hZ+ZVdXJyMjp06IDw8HAMGjQIHTt21BnU1b59e6Snp2P79u1mK5SUTKFQ4ODBg1AoFFVdSqWhzOJAmcVBjJkBceYWemZenaaFCxciIyMDJ0+exO7du9GzZ0+d7TKZDF27dsXp06fNUiQpm1Cnb5YHZRYHyiwOYswMiDO3kDPz6jQdOXIEQ4YMQadOnUrcx8/PDw8ePOBdGCGEEEKIJeHVaXr27Bn8/f1L3YcxhoKCAj6HJ4QQQgixOLw6TTVr1sTt27dL3efq1avw9fXlVRQxjUwmQ0hICGQyXstuCRJlFgfKLA5izAyIM7fQM/N+YO+BAwdw5coVg9ujo6MRERGBfv36las4Yjx7e/uqLqHSUWZxoMziIMbMgDhzCzkzr07TJ598Ant7e3Tr1g1LlizBnTt3AACHDh3Cp59+ij59+sDd3R0ffvihWYslhimVSoSHhwt6cJ2pKLM4UGZxEGNmQJy5hZ6Z1/Uxf39/HDlyBCNHjsSnn34KjuPAGMOAAQPAGIOvry92794NLy8vc9dLCCGEEFIleN9UbN++PW7fvo39+/fj7NmzePbsGZydndG+fXsMGjQINjY25qyTEEIIIaRKlWsklkwmw5AhQzBkyBBz1UMIsSKMMeTm5vJ+7fPnz5GZmYmcnJxy/SHm4OAAjuN4v54QQgAzPHtOTCz12XOMMSiVSshkMtF8MFBmYWR+8eIFHB0dq7oM5OTkoFq1alVdhlGEeJ7LS4yZAXHmttTMxn6+877SVFhYiL179+L8+fPIzMyESqXS24fjOKxfv57vtyAmyMvLg5OTU1WXUakoM7FWYjzPYswMiDO3kDPz6jQlJSWhZ8+euHv3Lkq7UEWdpsqhVCoRGRmJfv36QS6XV3U5lYIyCyOzg4MDcnJyeL32xYsXqFmzJgDg/v37qF69ernqEAohnufyEmNmQJy5hZ6ZV6fp/fffx507d/D2229jwoQJqFOnjmAXqiKEVByO4/jfFlOp0LW6E2rZyuGYcAcOHbuAk0rNWyAhhJiAV08nIiIC3bt3x8aNG81dDyGEQBkdCaxZhUOtm7xs+Hwu8tw9YTNtNmRdQ6q2OEKIaPFa3FKtVqNly5bmroWUgxiv9FFm66SMjkTBovnA0zSddpb+BAWL5r/sUFk5MZzn4sSYGRBnbiFn5jV7rmfPnrCzs8P+/fsroiaLZamz5wixFkylQt5bg8HSn5S4D+dRE/Zb/qRbdYQQszH2853Xlably5cjIiICu3fv5l0gMR+1Wo0nT55ArVZXdSmVhjJbJ/XV+FI7TADA0h5DfTW+cgqqAmI4z8WJMTMgztxCz8zrGtnBgwcREhKCESNGICgoCK1atTLYM+M4Dp9++mm5iySlU6lUiI2NRb9+/SCR8OoHCw5lts7M7Fm6WfcTIjGc5+LEmBkQZ26hZ+bVafr888+1/z8qKgpRUVEG96NOEyHEFJyru1n3I4QQc+LVaYqMtP6BmISQyidpHgjO3bPMMU2S5oGVVxQhhPw/Xp2moKAgc9dByoHjODg5OVnUkvQVjTILA69nz02aCiz/HAyAoaRs4hTk5uebdEghPXtOiOe5vMSYGRBnbqFnpmfPmYBmzxFiGr7PnhvoUQNfN/RFHTtbbVtKfgHm3UrGvrQMk48npGfPEUIqX4XOngNeLoX+7bffol27dnB2dtZZdyE+Ph5Tp07FrVu3+B6emECtViMpKUmwsxH4oMzWbV9aBl45fRl9L17H+H/uoO/F62h6+jKvDpPQiOk8a4gxMyDO3ELPzOv2XF5eHnr16oWYmBi4u7vD2dkZL1680G6vW7cuwsLC4Orqii+//NJsxRLDVCoV4uPj4e3tLcjZCHxQZmFkLs+z5xhjeP78OY4fP46BAwfCxsamXHUIhRDPc3mJMTMgztxCz8yr4qVLl+L06dNYtmwZHj16hEmTJulsd3FxQVBQEI4cOWKWIgkhwqR59hyfL0dHR3h6eqJ69epwdHTkfZxq1aoJdvwEIcSy8Oo07dixAyEhIZg7dy44jjP4C6levXpITk4ud4GEEEIIIZaAV6cpOTkZbdq0KXUfJycnPH/+nFdRxrp48SL69OkDZ2dnODk5oVevXoiPjze4b0xMDLp06QIHBwfUqlULM2bM4H3bwNJwHAcPDw9R/TVNmcWBMouDGDMD4swt9My8xjQ5OTnhyZPSH3Vw9+5deH
h48CrKGJcuXUKXLl3g4+ODhQsXQq1WY+3atQgKCsK5c+fQqFEj7b7x8fHo3r07mjRpgm+++Qb379/HypUrcfv2bRw6dKjCaqwsMpkMnTp1quoyKhVlFgfKLA5izAyIM7fQM/O60tShQwfs378fmZmZBrenpKQgPDwc3bp1K09tpfr0009hb2+P2NhYzJkzBx9++CFiYmKgVqvx0Ucf6ez70UcfoUaNGoiKisK7776LL7/8EmvWrMHhw4dx9OjRCquxsqhUKty4cQMqlaqqS6k0lFkcKLM4iDEzIM7cQs/Mq9P04YcfIiMjA927d8fp06ehVCoBALm5uThx4gR69+4NpVKJ2bNnm7XYoqKjo9GjRw+4ublp27y8vBAUFIQDBw5ob71lZWXh2LFjGD16tM7aC2PGjIGjoyN27txZYTVWFrVajZs3bwp2CicflFkcKLM4iDEzIM7cQs/M6/Zct27dsGbNGsycOVPnapKTkxMAQCqVYu3atWjdurV5qjSgoKAA9vb2eu0ODg4oLCzEP//8gw4dOuDq1atQKpV6Y7BsbGwQGBiIuLi4Ur9HQUGB9r+zsrIAAAqFAgqFAgAgkUgglUqhUql03gSadqVSiaLrh0qlUkgkkhLbNcfV0Kx/pemYGmrXvEahUEAul0OtVuv04jmOg0wmK7G9pNqrMlNRhjJp9lGr1TrHF3ImY84TAIt675kjU2nnSVOTSqWCXC63ikzF24vXrtmneC1CzmTsedL8rzVlKq12zT6MMb39hZqptHZNpqLZLSVT8Wwl4dVpAoApU6YgODgYP/30E86ePYtnz57B2dkZ7du3x9SpU9G0aVO+hzZKo0aNcObMGahUKkilUgBAYWEhzp49CwB48OABACA1NRXAy6tQxXl5eSE6OrrE77Fs2TIsWrRIr/3o0aPadV98fX3RsmVLXLlyRWe2YKNGjdC4cWOcO3cOaWlp2vbAwED4+fnh1KlTyM7O1rZ37NgRnp6eOHr0qM4bLSQkBPb29ggPD9epoV+/fsjLy9N5DmBkZCT69++P9PR0xMbGatudnJwQGhqKlJQUnYHyHh4e6NSpE27fvo2bN29q2y0pk0wmM5hJs8r0gwcPcPXqVavIVNZ5unv3LgDg2LFjVpPJ2PN09+5dNG3a1KoylXSemjdvDuDl5JWik1WEnKms86SpUfPetoZMxp4n4OWK9UU/i4SeqbTz9PTpUwD/O9eWksnYxz0J9jEqP/30E6ZMmYKxY8di7ty5UKvV+PLLL/HHH39AoVBg8+bNGD16NDZv3owxY8bg7NmzaNeunc4xxowZg3379pU4NsvQlSYfHx+kp6drb/VZQk9epVLh33//RdOmTWFnZ2e1f50UrV2tVuPatWto1qyZzr5CzlTWeSosLMTVq1fRtGlTbR1Cz1TWedK8t5s3bw4bGxuryFS8vXjtAPDPP//glVde0Vn8T8iZyjpP+fn52t9hUqnUKjIZc55UKhWuXbuGV199Ve89INRMpbXL5XIoFApcuXJFe64tJVNWVhbc3d3LfIyKYDtNAPDxxx9jxYoV2tBt2rRB7969sWTJEvz5558YPHgwdu/ejeHDh+PUqVPo2rWrzuvfeOMNREdHa69GlYWePUcIIYRYnwp/9pwlWLJkCR4/fozo6GhcuXIF58+f1/Y8GzZsCOB/t+UMdYxSU1Ph7e1deQVXEJVKhbi4OMHORuCDMosDZRYHMWYGxJlb6JkF3WkCgBo1aqBLly7acQDHjx9HnTp10LhxYwBAs2bNIJPJcOHCBZ3XFRYWIj4+HoGBgZVdstmp1WokJycLdjYCH5RZHCizOIgxMyDO3ELPLPhOU1E7duzA+fPnMWvWLO1YABcXF/To0QNbtmzRGQS2efNm5OTkYPjw4VVVLiGEEEIEhPfsuap26tQpLF68GL169YKbmxvOnDmDsLAw9OnTBzNnztTZd8mSJejUqROCgoLwn//8B/fv38eqVavQq1cv9OnTx+jvqRn+pVl6wFIoFArk5uYiKysLcrm8qsupFJSZMlsryiyOzIA4c1tqZs3nepnDvJlA3blzh/Xq1Yu5u7szW1tb1rhxY7Zs2TJWUFBgcP/o6GjWqVMnZmdnxzw8PNi0adNYVlaWSd8zJSWFAaAv+qIv+qIv+qIvK/xKSUkptR8g6NlzlU2tVuPhw4dwcnKyqIcNapZCSElJEc2sPspMma0VZRZHZkCcuS01M2MM2dnZ8Pb21lnqozjB3p6rChKJBHXq1KnqMkrk7OxsUW/CykCZxYEyi4MYMwPizG2JmV1cXMrcx6oGghNCCCGEVBTqNBFCCCGEGIE6TVbA1tYWCxcuhK2tbVWXUmkoszhQZnEQY2ZAnLmFnpkGghNCCCGEGIGuNBFCCCGEGIE6TYQQQgghRqBOEyGEEEKIEajTRAghhBBiBOo0EUIIIWbi7++PcePGVXUZpIJQp0lA7t69i3feeQf16tWDnZ0dnJ2d0blzZ3z//feYN28eOI4r8ys4OLiqY5iEMutmzsvL0+6nUqkQFhaG4OBguLq6wtbWFv7+/hg/fjwuXLhQ4XVu2LCh1J/7mTNnzPr91Go1NmzYgIEDB8LHxwfVqlVDs2bN8OWXXyI/P7/E112/fh0cx8HOzg6ZmZkG9wkODkazZs3MWm9pEhIS8N5776Fhw4ZwcHCAg4MDXnnlFUybNg1Xrlwx+Jq5c+eC4ziMGDGi0uo0J2vIXNp7fv78+VVdXqUx9t9+0TaJRAJvb2/06tULUVFROsfz9/cHx3Ho0aOHwe+3bt067XEq43dbaegxKgJx8OBBDB8+HLa2thgzZgyaNWuGwsJC/P333/jwww/RpUsXbN68Wbt/Tk4OpkyZgiFDhmDo0KHa9po1a1ZF+bxQZv3M//77L3755Rfk5eVh6NChOHz4MLp164aPPvoIrq6uSExMxM6dO7Fx40YkJydXymN/Fi9ejLp16+q1169f36zfJzc3F+PHj0eHDh3w7rvvwtPTE7GxsVi4cCFOnDiBiIgIg8+E3LJlC2rVqoWMjAzs3r0bkyZNMmtdpjpw4ABGjBgBmUyGt956Cy1atIBEIsGNGzfwxx9/4Mcff0RCQgL8/Py0r2GMYfv27fD398f+/fuRnZ0NJyenKkxhGmvLbOg9X5mdbkthzL/9nj17YsyYMWCMISEhAWvXrkVoaCgOHjyIvn37avezs7NDZGQkHj16hFq1aukcb+vWrbCzsyv1j6NKU+rjfIlFuHfvHnN0dGSNGzdmDx8+1Nt++/Zt9t133+m0paWlMQBs4cKFlVSleVHm0jNPmzaNAWDffvut3n5KpZKtWLGizKd1l1dYWBgDwM6fP1+h30ejoKCAnT59Wq990aJFDAA7duyY3ja1Ws38/f3Z7Nmz2ZAhQ1hwcLDBYwcFBbGmTZuavebi7ty5w6pVq8aaNGli8BwrFAr2/fffs+TkZJ32iIgIBoBFREQwuVzONmzYUOG1mos1ZTbmPe/n58fGjh1beUVVAWP/7QNg06ZN02m7cuUKA8B69eqlbfPz82Pdu3dnzs7Oer/XU1JSmEQiY
cOGDavU3zclodtzAvD1118jJycH69evh5eXl972+vXrY+bMmVVQWcWhzCVnvn//Pn7++Wf07NkTs2bN0ttPKpXigw8+sJiHS6vVanz//fdo3rw57Ozs4OHhgT59+uhcZlcqlfjiiy8QEBCgvc340UcfoaCgQLuPjY0NOnXqpHf8IUOGAHh5G66406dPIzExESNHjsTIkSNx6tQp3L9/vwJSGufrr7/GixcvEBYWZvAcy2QyzJgxAz4+PjrtW7duxSuvvIKQkBD06NEDW7duraySy02MmYu7d+8ehg8fDldXVzg4OKBDhw44ePCgzj5RUVHgOA47duzARx99hFq1aqFatWoYOHAgUlJSdPa9ffs2hg0bhlq1asHOzg516tTByJEj8fz588qMxUvz5s3h7u6OhIQEnXY7OzsMHToU27Zt02nfvn07atSogd69e1dmmSWi23MCsH//ftSrV8/gB4a1oswlO3ToEJRKJd5+++1Kqqx0z58/R3p6uk4bx3Fwc3MDAEycOBEbNmxA3759MWnSJCiVSkRHR+PMmTNo06YNAGDSpEnYuHEjXn/9dcyZMwdnz57FsmXLcP36dfz555+lfv9Hjx4BANzd3fW2bd26FQEBAWjbti2aNWsGBwcHbN++HR9++KE5opvswIEDqF+/Ptq3b2/0awoKCrBnzx7MmTMHAPDmm29i/PjxBm9jWCJrzGzoPW/o/QcAjx8/RqdOnZCbm4sZM2bAzc0NGzduxMCBA7F7925tp19jyZIl4DgO8+bNw5MnT/Ddd9+hR48eiI+Ph729PQoLC9G7d28UFBRg+vTpqFWrFh48eIADBw4gMzMTLi4uFZa7uLL+7RuSkZGBjIwMg7fvR40ahV69euHu3bsICAgAAGzbtg2vv/465HK5eYvnq0qvc5EyPX/+nAFggwYNMul1Qr5VRZlL9/777zMALC4ursLrKo3mEr2hL1tbW8bY/26xzJgxQ+/1arWaMcZYfHw8A8AmTZqks/2DDz7Q3p4pTY8ePZizszPLyMjQaS8sLGRubm7s448/1raNGjWKtWjRQu8YlXF7TnOOBw8erLctIyODpaWlab9yc3O123bv3s0AsNu3bzPGGMvKymJ2dnYGb81aGmvLXNp7XqP47blZs2YxACw6Olrblp2dzerWrcv8/f2ZSqVijDEWGRnJALDatWuzrKws7b47d+5kANj333/PGGMsLi6OAWC7du2q4LQlM+bfPmMvb89NnDiRpaWlsSdPnrCzZ8+y7t27MwBs1apV2v38/PxY//79mVKpZLVq1WJffPEFY4yxa9euMQDs5MmTlT4coCR0e87CZWVlAYDFDICsDJTZfPtWhv/+9784duyYztehQ4cAAHv27AHHcVi4cKHe6zSDtsPDwwEAs2fP1tmuucpQ/DZGUUuXLsXx48exfPlyVK9eXWfboUOH8PTpU7z55pvatjfffBOXL1/Gv//+a3rQctKcN0dHR71twcHB8PDw0H7997//1W7bunUr2rRpo/3L3MnJCf379xfE7SprzWzoPV+S8PBwtGvXDl26dNG2OTo64j//+Q8SExNx7do1nf3HjBmj82/79ddfh5eXl/bfieZK0pEjR5Cbm2vOWCYr7d++xvr16+Hh4QFPT0+0b98ep0+fxuzZs0scWvDGG29g+/btAF6+D3x8fNC1a9fKiGMUuj1n4ZydnQEA2dnZVVxJ5aHM5tu3MrRr1057m624u3fvwtvbG66uriW+PikpCRKJRO9yfa1atVC9enUkJSUZfN2OHTvwySefYOLEiZgyZYre9i1btqBu3bqwtbXFnTt3AAABAQFwcHDA1q1bsXTpUmMjmoXmgzAnJ0dv288//4zs7Gw8fvwYo0eP1rZnZmYiPDwc7733njYDAHTu3Bl79uzBrVu30LBhw4ovnidrzVzae764pKQkg7cmmzRpot1edOZdgwYNdPbjOA7169dHYmIiAKBu3bqYPXs2vvnmG2zduhVdu3bFwIEDMXr06Eq9NQcY93MYNGgQ3nvvPXAcBycnJzRt2hTVqlUrcf9Ro0bhhx9+wOXLl7Ft2zaMHDnS4KzYqkKdJgvn7OwMb29v/PPPP1VdSqWhzKVr3LgxAODq1asIDAys4Moqjym/GI8dO4YxY8agf//++Omnn/S2Z2VlYf/+/cjPz9f7EAJejpPQjB2pLC4uLvDy8jJ4jjUfqpoPRo1du3ahoKAAq1atwqpVq/Ret3XrVixatKhC6jUHMWauDKtWrcK4cePw119/4ejRo5gxYwaWLVuGM2fOWMwEEI06deqUuP6SIe3bt0dAQABmzZqFhIQEjBo1qgKrMx3dnhOAAQMG4O7du4iNja3qUioNZS5Z3759IZVKsWXLlkqqjL+AgAA8fPgQz549K3EfPz8/qNVq3L59W6f98ePHyMzM1Fm7BwDOnj2LIUOGoE2bNti5cydkMv2//f744w/k5+fjxx9/xK5du3S+vvzySyQlJeH06dPmCWmC/v37486dOzh37pxR+2/duhXNmjXTy7Br1y706NFDb6aRJRJj5qL8/Pxw8+ZNvfYbN25otxdV/N8BYwx37tyBv7+/Tnvz5s3xySef4NSpU4iOjsaDBw8M/gEhRG+++SaioqLQpEkTy/vDsEpHVBGjaNY5eeWVV9ijR48Mbre2NYsoc+mZ3333XQaA/fDDD3r7qVQqtnLlSotYp8mUgeD/+c9/dLbPnTtXbyD4tWvXmJubG2vatCl79uxZid+3e/furF69ega35efnM0dHR/buu+9q2yprnaZbt24xBwcH1rRpU4Pn+N69ewwAW7FiBUtOTmYcx7HFixcbPNbWrVsZAHbmzJmKLrtcrCkzn3WaNAPBY2JitG05OTmsXr16Jg0E1/zbf/78OVMoFDrfMysri0kkEvbBBx+YI2aZyrNOkyGageAaiYmJbOHChSw8PNzk71nR6PacAAQEBGDbtm0YMWIEmjRporNSdExMDHbt2mV1zzqizKVnXrVqFe7evYsZM2bgjz/+wIABA1CjRg0kJydj165duHHjBkaOHFkpdR86dEj7V3NRnTp1QkhICN5++2388MMPuH37Nvr06QO1Wo3o6GiEhITgvffeQ4sWLTB27Fj88ssvyMzMRFBQEM6dO4eNGzdi8ODBCAkJAfByDFfv3r2RkZGBDz/8UG+AeEBAADp27IiHDx8iMjISM2bMMFivra0tevfujV27duGHH36o1KnMDRo0wLZt2/Dmm2+iUaNG2tWx2f+vlrxt2zZIJBLUqVMH27ZtA2MMAwcONHisfv36QSaTYevWrSZN569sYsxc1Pz587F9+3b07dsXM2bMgKurKzZu3IiEhATs2bMHEonuDR9XV1d06dIF48ePx+PHj/Hdd9+hfv36mDx5MgAgIiIC7733HoYPH46GDRtCqVRi8+bNkEqlGDZsWKVmK+3ffr169Xgf18/PD59//nk5KqtAVdplIya5desWmzx5MvP392c2NjbMycmJde7cma1evZrl5+fr7Cv0qy4alLnkzEqlkv3666+sa9euzMXFhcnlcubn
58fGjx9fKcsRlDbtGAALCwvT1rlixQrWuHFjZmNjwzw8PFjfvn3ZxYsXtcdSKBRs0aJFrG7dukwulzMfHx+2YMECnbwJCQmlfj/NX/erVq1iANiJEydKrH3Dhg0MAPvrr78YY5V3pUnjzp07bMqUKax+/frMzs6O2dvbs8aNG7N3332XxcfHM8YYa968OfP19S31OMHBwczT01PvyoMlsobMfFcEv3v3Lnv99ddZ9erVmZ2dHWvXrh07cOCAzj6aK03bt29nCxYsYJ6ensze3p7179+fJSUlafe7d+8emzBhAgsICGB2dnbM1dWVhYSEsOPHj5s1a2mM/bcPnleaSvueVX2liWOMsUrpnRFCCCHEoKioKISEhGDXrl14/fXXq7ocUgIaCE4IIYQQYgTqNBFCCCGEGIE6TYQQQgghRqAxTYQQQgghRqArTYQQQgghRqBOE0FUVBQ4jkNUVFRVl0KI6Hz++ee8H+eyefNmNG7cGHK5XO+BxYQQ86PFLQkhRIBu3LiBcePGoU+fPpg/fz4cHByquiSzWrt2LRwcHKxuEVsibNRpIoQQAYqKioJarcb333+P+vXrV3U5Zrd27Vq4u7tTp4lYFLo9J2BqtRr5+flVXQYhpAo8efIEAOi2HCGViDpNFkAzpuHGjRt444034OzsDDc3N8ycOVOnU8RxHN577z1s3boVTZs2ha2tLQ4fPgwAePDgASZMmICaNWvC1tYWTZs2xW+//ab3ve7fv4/BgwejWrVq8PT0xPvvv4+CgoJKy3jr1i2MHj0aLi4u8PDwwKeffgrGGFJSUjBo0CA4OzujVq1aWLVqlc7rCwoKsHDhQtSvXx+2trbw8fHB3Llz9WoPCwtDaGgoPD09YWtri1deeQU//vijXj0XLlxA79694e7uDnt7e9StWxcTJkyo0J8BIX///Tfatm0LOzs7BAQE4Oeffza435YtW9C6dWvY29vD1dUVI0eOREpKina7v78/Fi5cCADw8PAAx3FV+qyupKQkTJ06FY0aNYK9vT3c3NwwfPhwJCYm6uxX0vitDRs2gOM47f7+/v74999/cfLkSXAcB47jEBwcrN3/3r17GD58OFxdXeHg4IAOHTroPYuQkIpAt+csyBtvvAF/f38sW7YMZ86cwQ8//ICMjAxs2rRJu09ERAR27tyJ9957D+7u7vD398fjx4/RoUMHbafKw8MDhw4dwsSJE5GVlYVZs2YBAPLy8tC9e3ckJydjxowZ8Pb2xubNmxEREVFpGTUPo12+fDkOHjyIL7/8Eq6urvj5558RGhqKr776Clu3bsUHH3yAtm3bolu3bv/X3r1HRXGefwD/DstlEZDlqlhlwRVvKFhR0YqsIEcKCGIghNBYDEZqJKitibVNepAcrdoqamnFg+eIaJuoEW9YL62K4AU0XuqNS1BYTFERRZCAF4Tn94e/nTDugouikOb5nMM57DvvvPO8M7vDw8w776KlpQVhYWE4ceIE4uPjMWTIEFy+fBmrV6/GN998g927d4vtp6Wlwd3dHWFhYTA2NkZ2djbmzJmDlpYWJCQkAHj2H/rkyZPh4OCARYsWQaFQQKPRYOfOnW9sP7Afn8uXL4vvu8WLF+Pp06dISkpCr169JPWWLl2KP/zhD4iKisIHH3yA6upqpKamwtfXFxcuXIBCocCaNWuwefNm7Nq1C2lpabC0tISHh0cX9Qz4+uuvcerUKURHR6Nv377QaDRIS0vDxIkTUVhY2OHxVmvWrEFiYiIsLS3x6aefAoC4n6qqqvCzn/0MjY2NmDt3Luzs7JCZmYmwsDDs2LED06ZN6/T+MSbq0m++Y0RElJSURAAoLCxMUj5nzhwCQBcvXiSiZ19+aGRkRFevXpXUmzlzJjk5OdHdu3cl5dHR0WRtbU2NjY1ERLRmzRoCQNu3bxfrNDQ00IABAwgA5eTkvIbePaPtY3x8vFj29OlT6tu3LwmCQMuXLxfL79+/T+bm5uKXXm7ZsoWMjIzo+PHjkjbXr19PAOjkyZNimbavrQUGBlL//v3F17t27eoWX/zIflzCw8NJLpdLvny1sLCQZDIZaU/FGo2GZDIZLV26VLLu5cuXydjYWFKu/UxVV1e/mQ60Q9/nLj8/nwDQ5s2bxTJtzM/TfhlreXm5WObu7k5qtVqn7vz58wmA5HxQX19Prq6u5OLiQs3Nza/WGcbawbfnuhHtlRCtxMREAMD+/fvFMrVajaFDh4qviQhZWVkIDQ0FEeHu3bviT2BgIOrq6nD+/HmxHScnJ8mXQfbo0QPx8fGvs1sSH3zwgfi7TCbDqFGjQESYOXOmWK5QKDBo0CCUlZUBAL766isMGTIEgwcPlvTP398fAJCTkyOua25uLv5eV1eHu3fvQq1Wo6ysDHV1dWL7ALBv3z40NTW9tr4yptXc3IxDhw4hPDwczs7OYvmQIUMQGBgovt65cydaWloQFRUlea/37t0bbm5ukvd6d9L6c9fU1IR79+5hwIABUCgU4vmns+zfvx9jxoyBj4+PWGZpaYn4+HhoNBoUFhZ26vYYa41vz3Ujbm5uktcqlQpGRkaScQGurq6SOtXV1aitrUV6ejrS09P1tqsdMFpRUYEBAwbojCkYNGhQJ0RvmNZ/MADA2toacrkc9vb2OuX37t0DAJSWlqKoqAgODg5629T2DwBOnjyJpKQk5Ofno7GxUVKvrq4O1tbWUKvViIiIQHJyMlavXo2JEyciPDwcMTExMDMz64xuMiZRXV2Nhw8f6nzGgWefP+0/RqWlpSAivfUAwMTE5LXG+bIePnyIZcuWISMjA5WVlaBWXzSh/Wels1RUVMDb21unfMiQIeLyYcOGdeo2GdPipKkb0zdgsvV/dMCzJ+gA4L333kNsbKzedrpyrMPzZDKZQWUAxBNvS0sLhg8fjpSUFL31+vXrBwC4fv06Jk2ahMGDByMlJQX9+vWDqakp9u/fj9WrV4v7ShAE7NixAwUFBcjOzsahQ4cQFxeHVatWoaCgAJaWlp3RVcY6rKWlBYIg4MCBA3o/F931vZmYmIiMjAzMnz8f48aNg7W1NQRBQHR0tPi5A/Sf04BnV+IY+yHgpKkbKS0tlVxJunbtGlpaWuDi4tLmOg4ODrCyskJzczMCAgLabV+pVOLKlSsgIsnJq6Sk5JVjf51UKhUuXryISZMmtTtzcnZ2Nh4/foy9e/dKrmi1dUtj7NixGDt2LJYuXYovvvgCv/jFL7B161bJLUTGOoODgwPMzc1RWlqqs6z150+lUoGI4OrqioEDB77JEF/Jjh07EBsbK3nq9dGjR6itrZXUs7GxAQDU1tZKpkqoqKjQabOtz7pSqdR7ziouLhaXM/a68JimbuRvf/ub5HVqaioAICgoqM11ZDIZIiIikJWVhStXrugsr66uFn8PDg7GzZs3sWPHDrGssbGxzdt63UVUVBQqKyuxYcMGnWUPHz5EQ0MDgO+vWD1/ayAjI0Oyzv379yV1AGDEiBEA8EamX2A/PjKZDIGBgdi9ezdu3LghlhcVFeHQoUPi67feegsymQzJyck671EiEm9
ZdzcymUwn3tTUVJ0rSCqVCgCQl5cnljU0NCAzM1OnTQsLC52kC3h2Hjtz5gzy8/MlbaSnp8PFxUUy5pOxzsZXmrqR8vJyhIWF4ec//zny8/Px97//HTExMfD09Gx3veXLlyMnJwfe3t6YNWsWhg4dipqaGpw/fx6HDx9GTU0NAGDWrFn461//il/+8pc4d+4cnJycsGXLlm7/9QvTp0/H9u3bMXv2bOTk5GD8+PFobm5GcXExtm/fjkOHDmHUqFGYPHkyTE1NERoail/96lf47rvvsGHDBjg6OuLWrVtie5mZmVi3bh2mTZsGlUqF+vp6bNiwAT179kRwcHAX9pT9L0tOTsbBgwcxYcIEzJkzB0+fPkVqairc3d1x6dIlAM+SiiVLluB3v/sdNBoNwsPDYWVlhfLycuzatQvx8fH4+OOPu7gnuqZMmYItW7bA2toaQ4cORX5+Pg4fPgw7OztJvcmTJ8PZ2RkzZ87EJ598AplMho0bN8LBwUGSTAKAl5cX0tLSsGTJEgwYMACOjo7w9/fHokWL8OWXXyIoKAhz586Fra0tMjMzUV5ejqysLBgZ8bUA9hp1yTN7TEL7GG5hYSFFRkaSlZUV2djY0EcffUQPHz4U6wGghIQEvW1UVVVRQkIC9evXj0xMTKh37940adIkSk9Pl9SrqKigsLAw6tGjB9nb29O8efPo4MGDb2zKgecfj46NjSULCwud+mq1mtzd3cXXT548oRUrVpC7uzuZmZmRjY0NeXl5UXJyMtXV1Yn19u7dSx4eHiSXy8nFxYVWrFhBGzdulDzOfP78eXr33XfJ2dmZzMzMyNHRkaZMmUJnz559PZ1n7P/l5uaSl5cXmZqaUv/+/Wn9+vV6H8PPysoiHx8fsrCwIAsLCxo8eDAlJCRQSUmJWKc7TTlw//59ev/998ne3p4sLS0pMDCQiouLSalUilOHaJ07d468vb3J1NSUnJ2dKSUlRe+UA7dv36aQkBCysrIiAJLpB65fv06RkZGkUChILpfTmDFjaN++fW+ms+xHTSB67poqe+MWL16M5ORkVFdX6zxFxhhjjLHuga9jMsYYY4wZgJMmxhhjjDEDcNLEGGOMMWYAHtPEGGOMMWYAvtLEGGOMMWYATpoYY4wxxgzASRNjjDHGmAE4aWKMMcYYMwAnTYwxxhhjBuCkibFubNOmTRAEAZs2berqUAyyePFiCIKAY8eOdXUo3ZpGo4EgCJgxY0aXbH/ixIkQBKFLtt1ddfUxYT8MnDQx1oV+aCfqY8eOQRAELF68uKtD6fY4Mek4FxcXuLi4dHUYjLXJuKsDYIy1bdq0aRg7diycnJy6OhSDfPTRR4iOjoazs3NXh8IYY52OkybGujFra2tYW1t3dRgGs7e35y+dZoz9z+Lbc4x1QF5eHkJDQ2Fvbw8zMzO4ubnhs88+Q2Njo07drKwsqNVqODo6Qi6Xo0+fPggICEBWVhaAZ+OVXF1dAQCZmZkQBEH80Y4JamtMkyAImDhxIiorKxETEwN7e3tYWVkhJCQEZWVlAICioiKEh4fD1tYWVlZWiIyMRFVVlU6cGzduxNSpU+Hi4gK5XA5bW1sEBgYiJydHUm/x4sXw8/MDACQnJ0vi1Wg0Yp22xjRlZ2fDz88P1tbWMDc3h6enJ1JSUvD06VNJvda3LK9du4Zp06bBxsYGFhYWCAgIwMWLF9s/SK3MmDEDgiCgrKwMK1euxMCBA2Fubo6hQ4di69atAIAnT57g008/Ffvv4eGBAwcO6G2vvr4eSUlJcHd3h7m5ORQKBQIDA3HixAlJPUEQkJubK/6u/dF3G7Yjfbxy5QqioqLg6OgIMzMzuLq6Yv78+bh3757e+idOnIBarYaFhQXs7Ozwzjvv4NtvvzV090lkZGTA29sblpaWsLS0hLe3t96xdq1v4Z46dQqTJ0+GQqFo91al9phXVFSgoqJCss+evxVsaBwvW5+x9vCVJsYMlJaWhoSEBCgUCoSGhsLR0RFnz57F0qVLkZOTg5ycHJiamop158yZAycnJ0ybNg12dna4ffs2zpw5g127diEiIgIjRozAvHnzsHbtWnh6eiI8PFzcliHjOu7fvw8fHx/07t0bsbGx+Oabb7Bv3z4UFxdjz549mDBhAry8vBAXF4dz584hKysLNTU1OHr0qKSdhIQEeHp6IiAgAA4ODqisrMTu3bsREBCAnTt3YurUqQCejdHRaDTIzMyEWq3GxIkTxTYUCkW7saakpGDBggWwtbVFTEwMLCwssHfvXixYsADHjx/Hzp07df6oajQajB07Fu7u7oiLi8P169exZ88e+Pn5oaioCL169XrhPtL6zW9+g9OnTyM0NBQymQxbt25FTEwMbGxskJqaisLCQoSEhODRo0f44osvMHXqVBQVFUGlUolt1NTUwNfXF1evXsX48eMxe/ZsPHjwQIzpq6++Eo9hUlISNm3ahIqKCiQlJYltjBgx4qX7eOLECQQGBuLJkyeIjIyEi4sL8vPzsXbtWuzbtw8FBQWSq3xHjhxBUFAQjIyM8M4776BPnz44cuQIxo8fDxsbG4P3HQDMnTsXqamp+MlPfoKZM2cCePZPwfvvv48LFy5g7dq1OuucOnUKf/zjH+Hn54f4+HjcuHGjzfYVCgWSkpKwZs0aAMD8+fPFZa3fZx2N42XiZqxdxBh7oatXr5KxsTF5enrS3bt3JcuWLVtGAGjlypVi2ciRI8nU1JSqqqp02mq9fnl5OQGg2NhYvdvNyMggAJSRkSEpB0AA6Ne//rWk/MMPPyQApFAoaM2aNWJ5S0sLBQcHEwA6d+6cZJ2ysjKd7d68eZP69OlDbm5ukvKcnBwCQElJSXrjTUpKIgCUk5Mjll27do2MjY3J0dGRbty4IZY/evSIfHx8CABt3rxZLNfuEwC0fPlySfufffYZAaBly5bp3f7zYmNjCQANHDiQ7ty5I5afPn1a3E8+Pj703Xfficu2bdtGACgxMVHSVkxMDAGgDRs2SMqrqqqoX79+5ODgQA8fPhTL1Wo1tXWK7Wgfm5ubSaVSEQA6ePCgpP4nn3xCACguLk5Sv3///iQIAh0/flwsb2lpEfth6Ok/NzeXANCQIUOotrZWLK+pqaGBAwcSAMrLyxPLte8RALRx40aDtqGlVCpJqVR2Shwdrf+izyJjREScNDFmgLlz5+qcZLWam5vJwcGBvLy8xLKRI0eShYUF1dTUtNvuqyRNlpaW1NDQICnPy8sjAKRSqailpUWybPPmzR36Q5aYmEgASKPRiGUvkzR9/vnnBIBWrFihU//kyZMEgPz9/cUy7T5xdXWl5uZmSX3tsrfeesugPmiTpszMTJ1l/fv3JwCUm5srKX/69CmZmJiQr6+vWFZdXU0ymUwSZ2t/+ctfCABlZ2eLZYYkTYb2UXtcg4KCdNqqr68nW1tbksvl9PjxYyL6PmEIDQ3Vqa/RaEgmkxmcNMXFxREA2rZtm86yf/zjHzoJm/Y9MnLkSIPab629pKmjcXS0PidNzBB8e44xAxQUFAAADh06hCNHjugsNzExQXFxsfg6Oj
oaCxcuxLBhwxATEwM/Pz/4+PigZ8+enRaTm5sbevToISnTPmXn4eGhc7tLu+zmzZuS8rKyMixbtgxHjx5FZWUlHj9+LFl+8+ZNKJXKl47zwoULAKS3WbTGjRsHuVyO//znPzrLRowYASMj6bDLvn37AgBqa2s7FMPzt8WAZ/ujrKxMZ5lMJoOjo6NkP3399ddobm7G48eP9U63UFpaCgAoLi7GlClTOhSXIX1sbx9aWlpi1KhR+Ne//oWSkhIMHz5cHBM1YcIEnfpKpRL9+vUTx6G9SHvb1o5x03f8Ro8ebVD7hupoHC8bN2Pt4aSJMQPU1NQAAJYuXWpQ/Y8//hh2dnZIS0vDqlWrsHLlShgbGyMkJASrV68WB4C/Cn0JmLGx8QuXNTU1iWXXrl3DmDFj8ODBA/j5+SE0NBQ9e/aEkZERjh07htzcXJ0kqqMePHgAAHrHIAmCgF69eqGyslJnWXt9aG5u7lAML7OvWu8n7fE/efIkTp482eZ2GhoaOi2u1n1sbx8C3yfE2np1dXUAAEdHR731e/XqZXDS9ODBAxgZGcHBwUFvO4IgiNt9flln6mgcLxs3Y+3hpIkxA2j/uD148ABWVlYvrC8IAuLi4hAXF4d79+7h+PHj+PLLL7F9+3aUlpbi0qVLkMlkrzvsF1q9ejXu37+PLVu24L333pMsmz17tvgE2KvQ7ruqqiqdK1ZEhKqqqk69Avc6aONbsGABVq5c2WXb1/f0IwDcvn1bUk87TcWdO3f01m+rnba23dLSgurqap0k7M6dOyAivcevsyf27GgcLxs3Y+3hKQcYM4C3tzeA72/TdYSdnR3Cw8Oxbds2+Pv7o7CwENeuXQMAMXHq6JWTznL9+nUAEJ+Q0yIivVdUXiben/70pwCgdxqC06dP49GjR3pvn3Uno0ePhiAIyM/PN3idzjy27e3DhoYGnD17Fubm5hg0aBAAwNPTEwBw/PhxnfoVFRUdmnagvW1ryzrr+Mlksjb3V0fjeJNxsx8PTpoYM8CcOXNgbGyMxMREvY9O19bWimMogGcnZSKS1GlqahJv88jlcgCAjY0NBEF46blzXpX2ys/z8wwtX74cV65c0alva2sLAB2KNyYmBsbGxkhJSZGME3ry5Al++9vfAkC3/xqZ3r17IyoqCqdOncKf//xnnWMLPEsAW8/X9TL7qi3jx4+HSqXCgQMHcPjwYcmyJUuW4N69e3j33XfFKS98fHzg6uqKffv2SY4tEeH3v/99hxK52NhYAM/m5mp9O6uurg7JycmSOq/K1tYWd+/exaNHj145jjcZN/vx4NtzjBlg2LBhWLduHT788EMMGjQIwcHBUKlUqK+vR1lZGXJzczFjxgysX78eABAeHo6ePXti7NixUCqVaGpqwr///W8UFhYiMjJSTFYsLS0xevRo5OXlYfr06XBzc4ORkRGmT5/+SoOvDTV79mxkZGQgIiICUVFRsLOzQ0FBAc6fP4+QkBD885//lNQfPHgw+vTpg61bt8LMzAx9+/aFIAhITExsc+ZylUqFFStWYMGCBfDw8EBUVBQsLCyQnZ2NkpISTJ06VefWYHe0bt06lJSUYOHChdiyZQvGjRsHhUKBb7/9FmfPnkVpaSlu3bolDs739/fHjh07EBERgaCgIMjlcnh6eiI0NLTD2zYyMsKmTZsQGBiI4OBgvP3221AqlcjPz8exY8egUqmwfPlySf309HQEBwcjICBAnKfp6NGjuHXrFjw8PHDp0iWDtu3r64vExESkpqZi2LBhiIiIABEhKysL//3vfzF37lz4+vp2uE/6+Pv74+zZswgKCsKECRNgamoKX19f8acjcbzJuNmPSBc9tcfYD9KZM2coOjqa+vTpQyYmJmRvb08jR46kRYsWUVFRkVhv3bp1FBYWRkqlkuRyOdnZ2dGYMWMoLS2Nnjx5ImmzpKSEgoODSaFQkCAIkkf225tyQK1W68TX3mPTbU0XkJOTQ+PHjycrKytSKBQUHBxM586d0zt9ABFRQUEBqdVqsrKyEufjKS8vJyL9Uw5o7dmzR1zPzMyMhg8fTqtWraKmpiaD+9Be3/XRTjmgja+19qYEaOvR98bGRvrTn/5EXl5eZGFhQebm5uTq6krh4eG0efNmSV+amppo4cKF5OzsTMbGxpI+vWwfL126RJGRkWRvb08mJiakVCpp3rx5VF1drbedvLw88vX1JXNzc7K1taW3336bKioq2u17WzZu3EijR4+mHj16UI8ePWj06NF6p6940bQU7amvr6dZs2aRk5OTOC3C8+0YGkdH6/OUA8wQApGe68yMMcYYY0yCxzQxxhhjjBmAkybGGGOMMQNw0sQYY4wxZgBOmhhjjDHGDMBJE2OMMcaYAThpYowxxhgzACdNjDHGGGMG4KSJMcYYY8wAnDQxxhhjjBmAkybGGGOMMQNw0sQYY4wxZgBOmhhjjDHGDPB/Y37nnPrcwOwAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAk0AAAHPCAYAAABOau4WAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAACteklEQVR4nOzdd1xTV/8H8M9N2AooS0EZirNqxT1RwC3WWau11u3TqlVbravLalu1dXToo63W4rau1r0FlApuQFu3gqCiAoKArIzz+8Nf7kNMgBCCybn9vl8vXq33ntx8PyQmx3vPOVdgjDEQQgghhJASycxdACGEEEIID6jTRAghhBBiAOo0EUIIIYQYgDpNhBBCCCEGoE4TIYQQQogBqNNECCGEEGIA6jQRQgghhBiAOk2EEEIIIQagThMhhBBCiAGo00RMzs/PD4IglPjzww8/AACCgoIgCAIiIyPNWjOPRo0aBUEQsG7dOnOXUiJ9r79MJoOzszNatWqFhQsXIjc319xlllliYiIEQYCfn5+5S/nXunTpEsaMGYM6derA3t4eDg4O8PX1RYcOHfDxxx/j2LFjxT6WMYZt27Zh4MCB8Pb2hp2dHapWrYqAgADMnDkTSUlJxT5W83dv1KhRJda3bt06g94jjx8/ho2NDQRBQIsWLUpsC+h+xspkMjg6OqJmzZoIDg7Gxx9/jHPnzpV6HFJ2VuYugEhXhw4dUKdOHb37XnvttVdcTfmMGjUK69evR1hYWKkflES/Hj16oHr16gAApVKJ5ORkREdH48KFC9i0aROioqLg4uLyyurx8/PDvXv3kJCQQB0fDi1fvhwffvgh1Go1atSogeDgYFStWhWpqam4dOkSoqOjERkZiW7duuk89uHDhxgwYADOnTsndlQ6dOiA3NxcxMTEYPHixfjpp5+wdOlSTJo0qcKzbNiwAQqFAsCLjmB8fDyaNm1a6uOKfsbm5eUhLS0NsbGxiIyMxNKlS9G5c2f89ttvqF27doXW/6/CCDExX19fBoCFhYWV2vbevXvs2rVr7Pnz5xVfWDmMHDnS4EyvysOHD9m1a9dYZmamuUspEQAGgEVEROjsu3HjBnNzc2MA2PTp019pXZr3aUJCglGPT0hIYACYr6+vSesipYuPj2cymYwBYN9//z1TKpVa+1UqFTt58iT75ptvdB779OlTVrt2bQaANWvWjP39999a+xUKBVuyZAmTy+UMAPvxxx91jqH5PBg5cmSJdYaFhRn0HmnQoAEDwGrUqMEAsMmTJ5fYvqTPWLVazQ4cOMDq1q3LALBq1aqxu3fvlng8Yji6PEfMysfHBw0aNICDg4O5S+GOp6cnGjRoAGdnZ3OXYrR69erhvffeAwCcOHHCzNUQXuzYsQNqtRrt2rXDhx9+CLlcrrVfJpOhU6dO+OSTT3Qe+8EHH+Du3buoVasWwsPD0ahRI639VlZWmD59On788UcAwMcff4zr169XWJbTp0/j+vXrqFq1Kn777TcAwObNm1FQUGDU8QRBQO/evXHu3DnUrVsXjx8/xrhx40xZ8r8adZqIWRU3pqnoeJ2EhAS8++67qF69OmxtbeHv74/PPvusxA+Vixcv4p133oGPjw9sbW3h4uKCHj164ODBg2WqTzNuZf369QCA0aNHa40l+PLLL7XalXSZRzMOITExsdjtERER6N69O6pWrQp7e3s0b94cGzZs0Hu84sY0ffnll2JtqampmDRpEry9vWFjYwNvb29MnjwZmZmZeo/JGMNvv/2Gli1bwsHBAa6urujVq5d4qUMQBAQFBRnwmzNc0Ut2+iiVSvz6668ICgqCi4sLbG1tUatWLUyYMAHJyck67YvWmZubiy+++AINGzaEg4MD/Pz8xHEm9+7dAwDUqlVL6zU1ZnydUqnEd999h0aNGsHe3h5ubm546623iv2yPXfuHGbOnInWrVujevXqsLGxQbVq1fDGG2/g+PHjxT7Pjh070LVrV7i6usLa2hqurq547bXXMH78eFy+fFnvY3bu3ImePXvC3d0dNjY2qFGjBoYPH46rV6+WOScA3L9/H5MnT0bdunVhZ2cHZ2dndOjQAb/88gtUKpVOe83ve9SoUXj+/DnmzJmDOnXqwNbWFtWrV8fIkSPx4MGDMtXw+PFjAICHh0eZHnf37l38/vvvAIAlS5agSpUqxbadOHEimjZtCoVCgcWLF5fpecri119/BQC888476NatG+rUqYOnT5/izz//LNdxq1SpIo4dDQ8Px8WLF8tbKgF1moiFi4uLQ0BAAKKiotC5c2d06tQJKSkp+OabbzB06FC9j/nxxx/RunVrbNmyBa6urujbty8aNWqEyMhIhIaGYv78+QY/f+XKlTFy5Ej4+/sDeDGGYOTIkeJPQECAKWICAH777Td06dIFT58+Rc+ePREQEIDY2FiMHDlS/PAri+TkZDRv3hy7du1C69at0a1bN2RnZ2PFihXo3r27OIaiqEmTJmHs2LGIjY1F69at0b17dyQnJ6NTp07Yv3+/CVLq0gxYfflf/ACQnZ2Nbt26Yfz48bh48SJef/119O3bF7a2tvj555/RrFkzxMbG6j1ufn4+goKCsGzZMtSqVQt9+/ZF3bp1UadOHYwcORKVKlUCAAwaNEjrNdV04spiyJAh+Oyzz+Dl5YX+/fvD2dkZO3bsQKtWrRATE6PT/pNPPsHSpUuRn5+PFi1aoH///qhZsyb279+Pbt26iWc5ipo/fz7eeustnDx5Eo0bN8bgwYPRtm1byOVyrF27FuHh4VrtlUolhgwZgsGDByMyMhL16tVD//794e7ujs2bN6Nly5Y4fPhwmXKeP38eTZs2xYoVK1BYWIj+/fujffv2uHTpEt5//32EhoaisLBQ72OfPXuG9u3b4+eff8Zrr72GXr16gTGGDRs2oEOHDnj27JnBdfj4+AB4cXby77//Nvhx+/btg1qtRpUqVdC3b98S2wqCgHfffRcAsHfvXjDGDH4eQ2VnZ2PHjh0AgDFjxkAQBIwePRoAxLNO5dGrVy9xnGBJg+JJGZj58iCRoLKMaercubPe8S6aMQMA2Keffqo1ZuHKlSusUqVKDACLjo7Wetzhw4eZIAjMzc2NnTx5Umvf5cuXWc2aNRkAFhkZWaZMpY1pMmR8S3FjaDTbra2t2b59+7T2acZEODs7s9zcXINqmjt3rvi7GzVqFMvPzxf3JSUlieMmtmzZovW4PXv2MACscuXK7PTp01r7li5dKh6zc+fOxWbUR/O4oq+xQqFgCQkJbN68eUwQBObg4MAuXLig89hhw4YxAKxPnz7s8ePHWvu+//57BoDVrVtX6/0REREhPufrr7/OUlJS9NZlqjFNAJibmxuLj48X9ymVSjZ58mTxPVH0NWCMsYMHD7KHDx/qHDM6Opo5OTkxa2trdv/+fXF7fn4+s7e3Z5UrV2bXr1/XeVxiYiK7du2a1
rZPPvmEAWBt2rTRGdOyY8cOJpfLWdWqVVlGRoZBefPz88Xf2fvvv88KCwvFfXfu3GF+fn4MAPvkk0+0Hqd5DwNgPXr0YM+ePRP3PX36lAUEBDAAbMGCBQbVwdiL97GjoyMDwKysrFjv3r3Zt99+y44dO1biGL93332XAWDBwcEGPc/JkyfF2ou+T0w1pmn16tUMAAsICBC33b9/n8nlciaTyVhiYqLex5XlM7Zr164MABs+fHipbUnpqNNETE7zF7q4n6JfuqV1mlq0aMHUarXOc7z//vsMAJs/f77W9jZt2jAAbOfOnXpr2759OwPABg0aVKZMr6LTNG3aNL2P0wwSPXXqlEE1aTpNNWvW1DvAftGiRQwAGzNmjNb2kJAQBoDNmTNHbx2tWrUqV6epuJ8ePXqwy5cv6zzu6tWrTBAE5uXlxbKysvQeu3fv3gyAVmezaKfp5d9ZUabsNP3www86+/Pz88UO6ubNmw0+7pw5cxgA9t///lfc9uTJE7ETaIj09HRmb2/P7OzstDpfRU2cOJEBYMuXLzfomBs3bmQAmJeXl04nkDHGdu7cyQAwR0dHlpeXJ27XdBwqVaqkt6P4+++/MwAsJCTEoDo0YmJixL8bRX9kMhlr3749+/3333Ue07NnTwaADR061KDnuH79unjcc+fOidtN1WnSfF69/Bpo3tdz587V+7iydJqGDh3KALBevXqV2paUjpYcIBWmuCUHGjRoYPAx+vTpA0EQdLY3bNgQALTGQqSlpeHcuXOwt7fHG2+8ofd4mvE40dHRBtfwqhRXc8OGDXH9+vUyj/vo0qWL3gH2+n53SqVS/J288847eo83bNgwnD9/vkw1FFV0yQHGGB4/fozY2FgcOXJEvExTrVo1sf3BgwfBGEOvXr3g6Oio95hBQUE4ePAgoqOj0adPH619Hh4eCAwMNLreshg5cqTONltbWwwZMgTLli1DZGQkhg0bprU/PT0dBw4cwN9//42MjAzxcumtW7cAADdu3BDburu7w8/PD5cvX8b06dMxduzYEpftiIiIQF5eHrp06YIaNWrobRMUFISVK1ciOjoaH3zwQakZNWO9hg4dCltbW539AwcORNWqVZGRkYGLFy+iQ4cOWvtbtmwJT09Pncfpez8aom3btvjnn39w8uRJHD58GOfPn8elS5fw7NkzREdHIzo6GocOHSrXOmasyCU5feO1yuPvv//G2bNnYWtrq/N3bsyYMTh48CDWrVuHL774AjKZ8SNp1Go1AOj9HCVlR50mUmHGjRtX7jWNNGMXXubk5ATgxbgVjYSEBDDGkJeXp/dDvajU1FTx/xctWqR3wO6SJUvg5uZmTNlGKUtWUx8vLS1N/HNxg9nLu5bR7NmzdQaR5+Xl4f3338eGDRvQo0cPXLx4UZwJdffuXQDA2rVrsXbt2hKPXfT1NEW9aWlp+Pjjj3W2N2jQALNnz9baVqVKlWIHFNeqVQvAi8HTRa1ZswYfffQRnj9/XmwNWVlZWn/esGED3nzzTSxbtgzLli2Di4sL2rRpg27duuHdd9/Veq9qfncnTpwo9ctS3+9OH02nRpPpZYIgoFatWsjIyNDbATL1+xt4MUsuODgYwcHBAF50bGJiYjB//nwcO3YM69evR2hoKAYPHgwA4u9IM5C8NE+ePBH/393dXfx/ze+0aKdKH81+fa+B5j3dv39/VK1aVWtf37594ebmhnv37uHEiRN615oyVFpaGgC80jXQpIw6TcSileVfWJp/UVWuXBmDBg0y+HGHDx/GyZMndbZ/+eWXJu00aeorTnn+NfkqjlcR/1K1t7fH8uXLsXHjRsTHx+Pw4cMIDQ0F8L/fV0BAQKkL/bVp00bvsY2Vk5MjzpgsqnPnzjqdJkMU/XK9ePEi3nvvPcjlcnz77bd444034OPjAwcHBwiCgNWrV+O9997T+UIODAxEYmIiDhw4gJMnTyI6OhpHjhzBoUOHMHfuXPz555/o0qULgP/97urUqaNzxudlZTnzWx6mfj/qI5fL0bFjRxw6dAitW7fGpUuXsHv3brHT1KJFC2zatAmXLl2CUqmElVXJX4GaSQrOzs5anUXNJIKSOr3Ai/cR8OIzqajCwkJs2rQJwIvB9R07dtR5rObM1tq1a43uNDHGxIkSTZo0MeoYRBt1mohkeHt7A3jx5f7bb78Z/CFtilu42NjYAHgxG0YfhUKBlJSUcj9PRXF1dYWtrS0KCgpw7949vZd+Xl4qwVScnJzg6uqKtLQ0XLt2Tew0aV7PDh06YMWKFRXy3MXx8/MzeLZUZmYmMjMz9Z5t0vzOatasKW7bsWMHGGOYPHkyZs6cqfMYzeU5fezt7fHmm2/izTffBPDiLNFnn32G1atXY8yYMeIyCprfXf369U12mx3NZT7NWSx9EhIStNqai1wuR0hICC5duiSeaQFeXAKfPn06nj17hj179pT4jyvGGDZu3AgA6Nevn9bnieas2e3bt0usQ/NavnyWbc+ePWJdd+/eLfF3unv3bjx9+tSoM0UHDx5ERkYGAKB79+5lfjzRRUsOEMnw8vLC66+/juzs7DJPpS6NplNU3FpCmjVwnj59qnVKX+PIkSPFPtYSWFtbo127dgCALVu26G2zdevWCnnuZ8+eIT09HYD2v8h79eoF4MV0b2Mu3ZSmtNe0LDRfrkUVFhZi27ZtAKB1WfLp06cAAF9fX53H5OfnY9euXQY/r7u7O7777jsAQFJSkvgF2aVLF9jY2CAyMlLv+9EYmgzbtm3T+3r8+eefyMjIgKOjo0H3TysPQzq0mnvHFe2w+vv746233gIAzJgxo9j1ygBg5cqVuHz5MmxsbHQ6tyEhIQCAy5cvF9txUigU2Lt3r1Z7Dc3aTLNmzQJ7MSFL70/r1q1RUFAgnpUqi2fPnuGjjz4CAHTr1s2ky6P8m1GniUjK119/DeDFIpT79u3T2c8Yw9mzZ3H06NEyHVfzwfvPP//o3W9tbY1OnToBAD777DOtS3Hx8fEGDbQ1tylTpgAAfvrpJ5w5c0Zr348//oizZ8+a/Dnz8vIwZcoUMMZgY2MjdpQAoFmzZhg0aBCSk5MxcOBAvWe6nj9/js2bNxs8RqWo0l7Tsvjqq6+01gtSq9WYNWsW7t+/D29vb60zGpqBz+vXr9c6M5mfn4+JEyeKZ2uKunfvHn799VedcU4AxPd51apVxfFB1apVw+TJk/H8+XO88cYbuHLlis7jCgoKsHfvXoNXux48eDB8fHzw8OFDTJs2TauzmZCQgOnTpwMAJk+eDDs7O4OOaaxPP/0UkydP1rugp1KpxC+//IKdO3cCgM56bv/973/h5+eHhIQEhISE6Lz+SqUSy5Ytw9SpUwEAq1ev1llDrGXLlujSpQsYYxg+fLjOWeS8vDxMmDAB9+7dg7u7O8aOHSvuS0pKEhcw1TeBoKgRI0YAKNuaTYwx8fLkrVu34OnpiTVr1hj8eFIyujxHJOWNN97Ajz/+iOnTp6Nv376oU6cO6tevD2dnZ6SmpiI+Ph5PnjzBrFmzynS6un///pg3bx5++ukn/P33
3/D29oZMJkPfvn3FRfK+/vprnDp1CmvWrMHJkyfx+uuv48GDB7hw4QKGDRuGyMhI8fKJJRowYAD+85//YPXq1ejYsSMCAwPh6emJK1eu4Nq1a/joo4/w/fffi2doymrRokXipSLGGJ48eYJLly7hyZMnkMlkWL58uc7Zl7CwMGRmZuLQoUOoX78+mjZtilq1aoExhsTERMTHx6OwsBDXrl3TmnlniEGDBiEiIgLDhw8XV2EHXpyBqF+/vsHH8fHxQYsWLdC8eXMEBQXB1dUV58+fx507d1CpUiVs2bJFqxMxevRo/Pjjj4iNjUWtWrUQGBgIuVyOqKgo5OXlYerUqTqLW2ZkZGD8+PGYOHEiAgICxPE1t27dQmxsLARBwOLFi7VuJ7Jo0SKkpKRgy5Yt4riw2rVrw8rKCvfv30dcXByeP3+OQ4cOGTSuydbWVlxdfNWqVTh48CDatm2L7OxshIeHIz8/Hz169MDcuXMN/t0ZKzc3FytWrMCKFStQo0YNNG3aFFWqVEF6ejri4+Px6NEjAMCcOXN0xgO5uLggKioK/fv3x8WLF9GkSRO0bNkS/v7+4g17U1NT4eTkhMWLFxfbsdm0aRO6d++Os2fPonbt2mjfvj08PT3x7NkzxMTEID09HS4uLti1a5fWpduwsDCo1Wq0atVK7EAXZ+jQoZg2bRri4+Nx8eJFnTN4v/76qzi8oKCgAGlpabh06ZJ4NjMoKAi//fab3rOaxEivaGkD8i9iysUtizuGZv2T4tZJuXLlCvvPf/7D6taty+zs7JiDgwOrXbs269GjB/vpp5/YgwcPyhaKMfbnn3+yDh06MEdHRyYIgt51VGJiYlj37t2Zk5MTs7e3Z02bNmUrV65karW61HWailsvqLjfRWnrNBW3xotmHSN96y2p1Wq2Zs0a1rx5c2ZnZ8eqVKnCunfvzk6dOsU2bNjAALC33367+F+SHihmfSY7OztWp04dNnr0aHbp0qViH69SqdiWLVtY7969WbVq1Zi1tTVzdXVljRs3ZqNHj2Z//vmn1kKLJeV7+bgLFy5kjRo1YnZ2dnoX4SxJ0bW5FAoF++abb1iDBg2Yra0tc3FxYYMGDWL//POP3sempqayiRMnMn9/f2Zra8u8vLzY8OHD2a1bt/S+t7OystgPP/zABgwYwOrWrcsqV67MKlWqxOrVq8dGjBihd2FQjYMHD7KBAweyGjVqMGtra1alShXWsGFDNnToULZly5Yy3yw7KSmJTZo0idWuXZvZ2NgwR0dH1q5dO7Zq1SqmUCh02pf2d9WYGx+npaWx33//nY0fP541b96ceXp6MisrK1apUiXWoEEDNmbMGJ2Fb1+meV/169ePeXl5MWtra/E94ODgwG7fvl1qHbm5uWz58uUsKCiIubq6MisrK+bk5MSaNWvG5syZo7OwatHPgRUrVhiUtX///gwAmzBhgrhN31p4lSpVYl5eXqxz585s+vTpWutKEdMRGKuAteEJIZIzZswYhIWFYenSpZg2bZq5yyHE5J49e4bg4GDExsaie/fu2Lt3b6nLl5B/FxrTRAgR/fPPPzrTqNVqNdasWYN169bBzs4Ob7/9tpmqI6RiOTs748iRI2jYsCGOHj2KIUOGWPQEDvLq0ZgmQoho8eLF2L59O5o1a4YaNWrg+fPnuHr1KhITEyGXy7Fy5Uq9qzoTIhXu7u44fvw41qxZA8YYLl68qHcdMPLvRJfnCCGiQ4cOYc2aNbh48SLS0tKgVCrh4eGBDh064MMPP0Tbtm3NXSIhhJgNdZoIIYQQQgxAY5oIIYQQQgxAY5rKQK1W4+HDh3B0dKQ7RhNCCCESwRhDdnY2vLy8SrwFF3WayuDhw4fiPZ0IIYQQIi3Jyclat955GXWaysDR0RHAi1+q5nYF5qTpGUvtzBfl4gvl4gvl4osUc1lipqysLHh7e4vf88WhTlMZaF5cJycni+g0KRQKREZGonfv3rC2tjZ3OSZDufhCufhCufgixVyWnKm0ThwNBCeEEEIIMQB1mgghhBBCDGCRnaacnBzMnTsXPXv2hIuLCwRBEO+OXtSoUaMgCILOj747dqvVanz33XeoVasW7Ozs8Prrr2Pr1q2vIE3FsrKS5hVWysUXysUXysUXKebiNZNFLm6ZmJiIWrVqwcfHB7Vr10ZkZCTCwsIwatQorXajRo3C77//jl9//VVru7OzM9544w2tbXPmzMGiRYswfvx4tGrVCnv27MGBAwewdetWDB061KC6srKy4OzsjGfPnlnEmCZCCCGElJ+h3+8W2dXz9PRESkoKqlevjgsXLqBVq1bFtrWyssLw4cNLPN6DBw+wdOlSTJo0CStWrAAAjBs3Dp07d8aMGTMwePBgyOVyk2Z4FdRqNdLS0uDm5lbiuhK8oVx8oVx8oVx8kWIunjNZZLW2traoXr26we1VKhWysrKK3b9nzx4oFApMnDhR3CYIAiZMmID79+8jJiamXPWai0qlQkxMDFQqlblLMSnKxRfKxRfKxRcp5uI5k0WeaSqL3NxcODk5ITc3F1WrVsXbb7+Nb7/9FpUrVxbbxMbGolKlSmjYsKHWY1u3bi3u79ixo86xCwoKUFBQIP5Z0zFTKBRQKBQAAJlMBrlcDpVKBbVaLbbVbFcqlSh6BVQul0MmkxW7XXNcDc11X6VSqbNd8/iij7G2toZardZ6MwqCACsrq2K3F1e7OTIVzaP5rxQyKZVKrVxSyaTJoyGVTEVzqdVqrePwnKkoKWXS/H0qmksqmYp+blCmisv0crbicN1p8vT0xMyZM9G8eXOo1WocPnwYK1euRHx8PCIjI8UXLiUlBdWqVdNZf8HT0xPAi5W+9Vm4cCHmzZuns/3o0aNwcHAAAPj4+KBZs2a4fPkykpKSxDb169dHgwYNcO7cOaSmporbAwIC4Ovri1OnTiE7O1vc3q5dO3h4eODo0aNab7Tg4GDY29vj4MGDWjX07t0bOTk5AIBjx44BePFGDQ0NRVpamtbZM0dHR4SEhCA5ORlxcXHidnd3d7Rv3x63bt3CjRs3xO3mzJSXl4eIiAgxl9QyAUBERITkMmlIKZPm8yM9PR3nz5+XTKbu3bsD+N/nhhQyhYaGIj09XSuXFDIV/ft07NgxylSBmXJzc2GIcg0Ev3fvHu7fv4+0tDQ4ODjA3d0dDRo0gJ2dnbGH1KEZ06RvILg+CxYswKeffqo1wLtLly5ISUnB1atXtdqq1WrI5XJMnToVP/zwg86x9J1p8vb2RlpamjhQzJw9eaVSiVOnTqF9+/ZiOymcwcjPz0d0dLSYSwqZlEollEqlmMve3l4SmTT/jY6ORufOnSGTySSRSfP/MTEx6Nixo9Y/uHjOpKnz5MmTWp8bvGeytrZGYWEhoqKixFxSyKRWq1FQUCB+blhbW1OmCsqUlZUFNze3UgeCl7nTFBERgXXr1uHEiRNISUnR2W9tbY2WLVtiwIABGDVqFFxdXctyeB1l7TTl5eWhcuXKGD16tDirrk+fPrh27Rru3Lmj1TY3NxeVKlXC7NmzsXDhwlKPTbPnCPl3YCo
V1FfiwJ6mQXBxg6xJAAQOJ4sQQgxj8tlz27dvx9y5c3Hz5k0wxuDt7Y3+/fujWrVqcHFxQV5eHp4+fYobN27g4sWLiI6OxmeffYbhw4dj/vz54qWwimZvbw9XV1c8ffpU3Obp6YmIiAgwxrT+xajp9Hl5eb2S2kxNrVYjOTkZ3t7e3M1AKAnl4ovUcimjIlD432VgaU/EbYKbB2wmTYNVYLAZKzMNqb1eGpSLHzxnMqjT1LZtW5w7dw7NmzfH0qVLMXjwYNSoUaPY9gqFAqdOncKmTZuwfft2/P7779iwYQMGDBhgssKLk52djbS0NLi7u4vbAgIC8Ouvv+LatWt47bXXxO1nz54V9/NIpVIhLi4OXl5e3L3xSkK5+GLJuRhjBo9VAADEnAIWfQkGoOgISJb2BAXzZqNg9pdAu06lHsbBwcFibkT6Mkt+vcqDcvGD50wGdZpsbGxw/PhxhISEGHRQa2trdOnSBV26dMH333+PpUuXag3YMoX8/HwoFAqdOxJ/9dVXYIyhZ8+e4rZ+/frho48+wsqVK8V1mhhj+Pnnn1GjRg20b9/epLURQizD8+fPS71ruYYMwNUOTeFlawOZng6PmjE8+HIOGp2Oh1r34Vqys7O1ZvASQqTBoE7TqVOnjH6CKlWq4Kuvvirz41asWIHMzExxZtu+fftw//59AMDkyZORkZGBZs2a4e233xZvm3LkyBEcPHgQPXv2RL9+/cRj1axZEx9++CEWL14MhUKBVq1aYffu3YiKisLmzZu5XNiSEFK6spxl6lDFETXtbIvdLxMEeNvZokMVR0RlZhfbTvO81GkiRHosdsmBJUuW4N69e+Kf//jjD/zxxx8AgOHDh6NKlSro06cPjh07hvXr10OlUqFOnTpYsGABPv74Y51TfosWLULVqlXxyy+/YN26dahbty42bdqEYcOGvdJcpiQIAtzd3S32MoCxKBdfLDmXZmkQQ1S3tTZZu7I876tmya9XeVAufvCcySLvPWepaPYcIXwp05imK3HAZ9NKb/f1MqBJQIlNLHlMEyFEV4Xfey41NRVhYWE4f/48MjMz9S6HLggCTpw4YexTkFKoVCrcunULdevWldQlRsrFF0vOJQgCKlWqZFBb1qot8tw8tGbN6RzPvRrsW7XlevkBS369yoNy8YPnTEZ1mi5fvoyQkBBkZGSgpBNV9C+tiqVWq3Hjxg34+/tz98YrCeXii1RyCXI5bCZNQ8G82cW2sZn4EdcdJkA6r9fLKBc/eM5k1Fy/6dOn4+nTp/j000+RkJAAhUIBtVqt88PjzfgIIf9eVoHBsJ27CIKbh9Z2wb0abOcuksQ6TYQQ4xl1pikmJgb9+/fH/PnzTV0PIYSYlVVgMOTtO6Ew7gJiw0+gWUgX2AS05P4MEyGk/IzqNNnY2MDf39/UtZAykslk8PHx4W5xsNJQLr5IMZcgl8MqoCVsZTawev11SXWYpPh6AZSLJzxnMmr23IABA5CZman3LudSRrPnCCGEEOkx9PvdqG7ekiVL8Pfff2PJkiVGF0jKT6VSITY2VnJjxygXXygXXygXX6SYi+dMBl2eGzNmjM62xo0bY9asWfj5558REBCgt2cmCALWrl1b/iqJXmq1GklJSWjcuDF3MxBKQrn4Qrn4Qrn4IsVcPGcyqNO0bt26YvfdvXsXd+/e1buPOk2EEEIIkQqDOk0JCQkVXQchhBBCiEUzqNPk6+tb0XUQI8hkMtSvX5/LGQgloVx8oVx8oVx8kWIunjMZPHsuLCwMISEh/+oOFM2eI4QQQqTH5LPnxo4di9q1a6N27doYO3YsNm3ahAcPHpikWGIcpVKJ6OhoKJVKc5diUpSLL5SLL5SLL1LMxXMmgztNU6ZMQZMmTXDv3j2EhYVh5MiR8PHxQb169fDee+9h27ZtePz4cUXWSl7CGENqamqJ9//jEeXiC+XiC+XiixRz8ZzJ4BXBf/jhBwBARkYGTp48iYiICERGRuLvv//G7du38euvvwIAGjRogODgYAQHByMoKAiurq4VUjghhBBCyKtU5tuoVK1aFf3790f//v0BAE+fPkVkZKTYibp69SquX7+OVatWQSaTQaFQmLpmQgghhJBXrtxD111cXDBw4EAsX74cFy9exLZt29CoUSMwxqBWq01RIymGXC5HQEAAd4uDlYZy8YVy8YVy8UWKuXjOZNS95zSUSiXOnj2LiIgIREREICYmBgUFBWCMoUGDBujcuTNWrVplynrNimbPEUIIIdJTIfeeU6vVOHv2LBYtWoQePXqgSpUq6NSpE+bOnYsnT55g7Nix2LZtGx49eoSrV69KqsNkiZRKJcLDw7mcgVASysUXysUXysUXKebiOZPBY5p69+6N06dPIycnB4IgoEmTJhg3bhw6d+6MTp060YBvM2CMITs7m8sZCCWhXHyhXHyhXHyRYi6eMxncaTp8+DBkMhkGDhyIL774Ak2aNKnIugghhBBCLIrBl+f69u0LZ2dn7Nq1CwEBAahTpw7Gjx+PzZs34/79+xVZIyGEEEKI2ZVpIDhjDHFxceLyAlFRUXj27BkEQYCfnx+CgoLEH29v74qs2ywsbSC4Wq1GWloa3NzcuLyHT3EoF18oF18oF1+kmMsSMxn6/V6u2XNqtRqxsbEIDw9HZGQk/vrrL2RnZ4udqODgYHHRSymwtE4TIYQQQsqvQmbP6TxYJkOLFi0wY8YMHDhwABkZGdixYwcaNWqEhIQEhIWFlefwpBQKhQIHDhyQ3AKilIsvlIsvlIsvUszFc6Yyrwj+spSUFPFyXUREBO7evSvu43HhKt7wOGXTEJSLL5SLL5SLL1LMxWumMneaHj9+rHXblFu3bgF4Md5JEAQ0bdpUvPdcp06dTF4wIYQQQog5GNxpmjhxIiIjI3Hjxg0A/+skNW7cGEFBQQgODkbnzp1RtWrVCiuWEEIIIcRcDB4Irhnh3rBhQ/FMUlBQ0L9qUUtLGwiuWSDM0dERgiCYuxyToVx8oVx8oVx8kWIuS8xk6Pe7wWeatm7diqCgIFSrVs0kBRLTsLe3N3cJFYJy8YVy8YVy8UWKuXjNZPDsuSFDhlCHycIolUocPHiQ2wF1xaFcfKFcfKFcfJFiLp4zGdRpSkpKKvcTPXjwoNzHIIQQQggxF4M6TXXr1sWkSZOQkJBQpoMrFAps3boVjRo1wtq1a40qkBBCCCHEEhjUafr222+xbds21KlTB507d8by5ctx/vx5vQtT3b9/H7t27cL7778PT09PDB8+HL6+vhg2bJjJiyeEEEIIeVUMnj2XmZmJZcuWYe3atUhJSYEgCJDJZKhSpQqqVKmC/Px8PH36FPn5+S8OLAjo0aMHpk+fjpCQkAoN8apY4uw5pVIJKysri5mBYAqUiy+Uiy+Uiy9SzGWJmSrs3nMqlQqHDh3CiRMnEB0djfv37yM9PR329vZwd3dHkyZN0LlzZ/Tr1w++vr7lDmJJLLHTZGnTNk2BcvGFcvGFcvFFirksMVOF3XtOLpejT58++P7773H27Fk8ePAA+fn5yMjIwM2bN7Fr1y5MmT
JFch0mS6RUKhEREcHlDISSUC6+UC6+UC6+SDEXz5nKdcNeQgghhJB/C+o0EUIIIYQYgDpNnLOyKvM9l7lAufhCufhCufgixVy8ZirzQPB/M0sbCE4IIYSQ8quwgeCvQk5ODubOnYuePXvCxcUFgiBg3bp1JT5GoVDgtddegyAIWLJkic5+tVqN7777DrVq1YKdnR1ef/11bN26tYISvBpqtRpPnjyBWq02dykmRbn4Qrn4Qrn4IsVcPGeyyE5TWloa5s+fj2vXrqFp06YGPWb58uUl3u7l008/xaxZs9CtWzcsX74cPj4+GDZsGH7//XdTlf3KqVQqxMTEQKVSmbsUk6JcfKFcfKFcfJFiLp4zWWSnydPTEykpKbh37x4WL15cavsnT55g/vz5mDVrlt79Dx48wNKlSzFp0iSsXr0a48ePx759+xAYGIgZM2Zw+cIRQggh5NWyyE6Tra0tqlevbnD72bNno379+hg+fLje/Xv27IFCocDEiRPFbYIgYMKECbh//z5iYmLKXTMhhBBCpK1cw9fPnTuH8+fPIzMzU+/ZGkEQ8Pnnn5fnKQyqYf369fjrr7+KXVk0NjYWlSpVQsOGDbW2t27dWtzfsWNHnccVFBSgoKBA/HNWVhaAF+OnNPfdk8lkkMvlUKlUWtdnNduVSiWKjrWXy+WQyWTFbn/5fn6aGQYvLwKm2V65cmWtfdbW1lCr1VqvhyAIsLKyKnZ7cbWbK5NSqdTKJZVMRXNJJZPmv5UrV4YgCJLJpPl/R0dHMMa0jsNzJk2dL39u8J7J2toajDGtXFLIpFartT43KFPFZdJ3L119jOo0PX36FP3798fp06dR0uS7iu40McYwefJkDBkyBO3atUNiYqLedikpKahWrZpOp8rT0xMA8PDhQ72PW7hwIebNm6ez/ejRo3BwcAAA+Pj4oFmzZrh8+bLWmKr69eujQYMGOHfuHFJTU8XtAQEB8PX1xalTp5CdnS1ub9euHTw8PHD06FGtN1pwcDDs7e1x8OBBrRp69+6NvLw85OTk4OjRowBevFFDQ0ORlpamdfbM0dERISEhSE5ORlxcnLjd3d0d7du3x61bt3Djxg1xu7kzRUREiL9nqWUCgPDwcMllsrKygpWVFZ48eSKpTKGhoZLM1KRJE/FzQyqZMjMztT4PpZCp6GfE0aNHKVMFZsrNzYUhjFpyYNSoUdiwYQOCgoIwcuRI1KxZs9g1Fzp37lzWw2u5cOECWrVqhbCwMIwaNUprX1hYGCZNmoQbN27A29sbiYmJqFWrFhYvXoyPP/5YbNelSxekpKTg6tWrWo9Xq9WQy+WYOnUqfvjhB53n1nemydvbG2lpaeKURHP25NVqNe7du4caNWpAJntxpVUKZzAKCwvx4MEDMZcUMimVSqjVajGXra2tJDIBEHP5+fkBgCQyaXI9evQINWrU0Do2z5k0x0lKSoKnp6f4ucF7JmtrayiVSiQlJYmfG1LIpFaroVAoxM8NuVxOmSooU1ZWFtzc3EpdcsCoM0379+9H69atceLECbPdbC8rKwtz5szBjBkz4O3tXWJbe3t7rc6PRn5+vrhfH1tbW9ja2upst7a2hrW1tdY2uVwOuVyu07a4zmRx218+bknb1Wo1rly5Am9vb639MplM/DAsqrjtxdVujkyaOl/OxXsma2trKBQKMVdJtfOUCYBWLmtra0lkAl7kiouLg5eXl972PGYCXuSKj49HjRo1dPbxmgl4ceWhLJ+HPGTS1K7JpXkuymT6TMVl0KnfoFYvycvLQ6dOncx6d+IlS5agsLAQQ4YMQWJiIhITE3H//n0AQEZGBhITE1FYWAjgxWW4R48e6VxKTElJAQB4eXm92uIJIYQQwh2jOk0BAQHFjh96VZKSkpCRkYFGjRqhVq1aqFWrFgIDAwEACxYsQK1atcTLcQEBAcjNzcW1a9e0jnH27FlxPyGEEEJISYzqNM2dOxd79+7FmTNnTF2PwaZMmYI///xT6+eXX34B8GLM1Z9//olatWoBAPr16wdra2usXLlSfDxjDD///DNq1KiB9u3bmyVDeQmCAHd3d7Oe8asIlIsvlIsvlIsvUszFcyaDBoJv2LBBZ9uePXuwf/9+vPPOO2jevHmxA6dGjBhhVGErVqxAZmYmHj58iFWrVmHgwIFo1qwZAGDy5MlwdnbWeUxxA8EBYObMmVi8eDH+85//oFWrVti9ezcOHDiAzZs3Y9iwYQbVRPeeI4QQQqTH4O93ZgBBEJhMJtP6EQRB60fffplMZsjh9fL19WUA9P4kJCTofUxCQgIDwBYvXqyzT6VSsQULFjBfX19mY2PDGjVqxDZt2lSmmp49e8YAsGfPnhkTyeSUSiW7du0aUyqV5i7FpCgXXygXXygXX6SYyxIzGfr9btDsubCwsHL24crOmDFTfn5+xa4bJZPJMGfOHMyZM6eclVkOtVqNGzduwN/fX+8sAl5RLr5QLr5QLr5IMRfPmQzqNI0cObKi6yCEEEIIsWgWee85QgghhBBLY1Snaf/+/Rg4cGCxtx95+PAhBg4ciEOHDpWrOFIymUwGHx8fvQuC8Yxy8YVy8YVy8UWKuXjOZNRtVHr16oWHDx8iPj6+2DbNmjVDjRo1sH///nIVaElo9hwhhBAiPYZ+vxvVzYuPj0ebNm1KbNOmTRutm+8R01OpVIiNjdW6X48UUC6+UC6+UC6+SDEXz5mM6jQ9ffoUHh4eJbZxc3NDWlqaUUURw6jVaiQlJWndtFAKKBdfKBdfKBdfpJiL50xGdZrc3d1x48aNEtvcuHEDLi4uRhVFCCGEEGJpjOo0derUCfv27cPly5f17o+Pj8fevXvRuXPnchVHCCGEEGIpjOo0zZo1CwDQsWNHzJ8/HzExMUhKSkJMTAzmzZuHwMBAcTFJUnFkMhnq16/P5QyEklAuvlAuvlAuvkgxF8+ZjJo9BwC7du3CyJEjkZeXp7WdMYbKlStjw4YN6N+/vylqtBg0e44QQgiRngqdPQcAgwYNwt27d7Fw4UIMHDgQXbp0waBBg/Ddd9/hzp07kuswWSKlUono6GgolUpzl2JSlIsvlIsvlIsvUszFcyaDbqNSHA8PD8ycOdNUtZAyYowhNTW12Pvt8Ypy8YVy8YVy8UWKuXjOxN8FRUIIIYQQMyhXp2nz5s3o1q0b3N3dYWtrC3d3d3Tr1g1btmwxVX2EEEIIIRbBqMtzKpUKb731Fnbv3g3GGOzs7ODl5YXHjx/jxIkTCA8Px65du7Bjxw4uR8fzQi6XIyAgAHK53NylmBTl4gvl4gvl4osUc/GcyagezU8//YQ///wTHTp0wOnTp5Gbm4uEhATk5uYiOjoaHTt2xO7du7F8+XJT10uKkMlk8PX1lVzHlHLxhXLxhXLxRYq5eM5kVMXr169HvXr1cOLECbRr105rX9u2bXH8+HHUq1cPYWFhJimS6KdUKhEeHs7lDISSUC6+UC6+UC6+SDEXz5mM6jTdvHkTffv2hbW1td791tbWeOONN3Dz5s1yFUdKxhhDdnY2lzMQSkK5+EK5+EK5+CLFX
DxnMqrTZGNjg+fPn5fY5vnz57CxsTGqKEIIIYQQS2NUp6lZs2bYvn07Hj58qHd/SkoKtm/fjubNm5erOEIIIYQQS2HUbVT27duHfv36oXr16pg+fTo6d+6MatWq4fHjx4iMjMSyZcvw+PFj7NmzB3369KmIus3C0m6jolarkZaWBjc3Ny4H1BWHcvGFcvGFcvFFirksMZOh3+9G33tu2bJlmD17NlQqldZ2xhisrKzw7bff4qOPPjLm0BbL0jpNhBBCCCm/Cr/33LRp03D9+nV8+eWX6N+/P0JCQtC/f3/Mnz8f169fl1yHyRIpFAocOHAACoXC3KWYFOXiC+XiC+XiixRz8ZypXPeeq127Nj7//HNT1UKMwOOUTUNQLr5QLr5QLr5IMRevmUxyMVGpVCIjI4PbXwIhhBBCSGmM7jSpVCp8//33aNq0Kezs7ODm5gY7Ozs0bdoUP/zwA3WgCCGEECIpRg0Ez8nJQY8ePXDmzBnIZDJ4e3uLs+eSk5OhVqvRrl07HDlyBJUqVaqIus3C0gaCaxYIc3R0hCAI5i7HZCgXXygXXygXX6SYyxIzVehA8C+++AIxMTF4++23cefOHdy9excxMTG4e/cu7ty5g6FDhyI6OhpffPGF0QGIYezt7c1dQoWgXHyhXHyhXHyRYi5eMxnVadq+fTtatmyJTZs2wcfHR2ufj48PNm/ejBYtWmDbtm0mKZLop1QqcfDgQcldCqVcfKFcfKFcfJFiLp4zGdVpSk9PR9euXUts07VrVzx9+tSoogghhBBCLI1Rnaa6deviyZMnJbZJTU1FnTp1jCqKEEIIIcTSGNVpmjp1KrZt24Z//vlH7/4rV67g999/x4cfflie2gghhBBCLIZRs+dOnTqFpUuX4ujRoxg5ciQ6duwozp6LiorChg0b0KNHD0ybNk3nsZ06dTJJ4eZgibPnlEolrKysLGYGgilQLr5QLr5QLr5IMZclZqrQe8/JZDIIggDNQ4uG1retqJfvVccTS+w0Wdq0TVOgXHyhXHyhXHyRYi5LzGTo97tRt1H54osvLCbov5lSqURERAR69+4Na2trc5djMpSLL5SLL5SLL1LMxXMmozpNX375pYnLIIQQQozDVCqoL19CtZv/QF2zOlhASwhyubnLIhJUrhv2EkIIIeakjIpA4X+XgaU9QRMAyiN7oHLzgM2kabAKDDZ3eURiynXD3tjYWMycORN9+/bVWrfp3r172L59O63T9ApYWUmz30u5+EK5+CKVXMqoCBTMmw2Wpr0EDkt7goJ5s6GMijBTZaYllderKF4zGTUQHABmzpyJpUuXag381gzyTkxMRJ06dbB06VJMnTrVdNWamaUNBCeEEClRq9VIS0szsLEK9lPGQniaDn0jbBkA5uqGvB9/BWQlX6pzc3ODTFaucwiEcxV677mwsDAsWbIEffr0weXLlzFnzhyt/X5+fmjdujX27t1rzOGJgdRqNZ48eQK1Wm3uUkyKcvGFcvHFknOlpaWhWrVqBv282bA+ZMV0mABAACBLT8ObDeuXeiyDO2pmYMmvl7F4zmRUp2nlypVo2LAhdu3ahcaNG8PGxkanTYMGDXDr1q1yF0iKp1KpEBMTw/UyDvpQLr5QLr5IJVd1W8NmXRnazlJZ8uvFGMPz58/L9JPz7BlSw4/h75U/ISv6FJ5nZZX5GEZeIDMJoy4qXr16FePHjy/xmmS1atVKvdWKPjk5OVi8eDHOnj2Lc+fOISMjA2FhYRg1apRWuzVr1mDTpk24fv06MjMz4eXlhaCgIMydOxd+fn46x127di2WLFmChIQEeHt7Y8qUKZg8eXKZ6yOEEFIx3Nzc8PjxY4Payq5eAb7+pNR2K3/fjhWvNSn1eUnZ5ebmonLlyga37+teFd/V80FNO1u0AYC/juF+fgFm3kzC3tQMg4+Tk5ODSpUqlb1gEzCq02RlZYXCwsIS2zx8+LBMv0yNtLQ0zJ8/Hz4+PmjatCkiIyP1touNjUWtWrXQt29fVK1aFQkJCVizZg3279+P+Ph4eHl5iW1/+eUXvP/++xg0aBCmTZuGqKgoTJkyBbm5uZg1a1aZaySEEGJ6MpkMHh4eBrVlrkHIc/PQGQRelOBeDa6BQbT8gAXo614Vm5ro3o/Wy9YGm5rUwfArt8vUcTIXozpNTZo0QXh4OFQqFeR63oy5ubk4fvw4WrRoUeZje3p6IiUlBdWrV8eFCxfQqlUrve1Wrlyps61///5o2bIlNmzYgNmzZwMA8vLy8OmnnyI0NBQ7d+4EAIwfPx5qtRpfffUV/vOf/6Bq1aplrtMSCIJgUSuqmgrl4gvl4otUcglyOWwmTUPBvNnFtrGZ+BH3HSZLfr0cHByQk5NTekOVChg/DEhP1dkl+/9cW0LaA6u3AAa8Xg4ODmWu1VSMGtM0ZswY3Lx5E++//z4KCgq09mVlZWHUqFF49OgRxo8fX+Zj29raonr16saUJV6Wy8zMFLdFREQgPT0dEydO1Go7adIkPH/+HAcOHDDquSyBlZUVQkJCuJ26WRzKxRfKxRcp5bIKDIbt3EUQ3LTPTgnu1WA7d5Ek1mmy5NdLEARUqlSp1B+7u7f0dpi0pKXC7u4tg45nzg6kUa/CmDFjcPz4caxduxbbtm1DlSpVAACtW7fGtWvX8Pz5c4waNQpvvvmmKWvVKz09HSqVCklJSZg/fz4AoEuXLuL+2NhYAEDLli21HteiRQvIZDLExsZi+PDheo9dUFCg1SnMysoCACgUCigUCgAvTifL5XKoVCqtmQCa7UqlUmvQmlwuh0wmK3a75rgamr8oSqVSZ7tarca9e/dQo0YNcbqstbU11Gq11qBBQRDE9vq2F1e7OTIBQGFhIR48eCDmkkImpVIJtVot5rK1tZVEJgBiLs0/WqSQSZPr0aNHqFGjhtaxec6kOU5SUhI8PT3Fzw2uM7XtCPv2naCIu4i0WzdQ1b8u5E0CgP9vy2Um/O+zXKFQiJ8bcrmcy9dJlWrYODVF6mOo//85XnWml7MVx+iu65YtWxAcHIwVK1bg77//BmMMFy5cQMOGDTFlyhS89957xh66TGrUqCF2bFxdXfHTTz+hW7du4v6UlBTI5XKd6+Q2NjZwdXXFw4cPiz32woULMW/ePJ3tR48eFU8P+vj4oFmzZrh8+TKSkpLENvXr10eDBg1w7tw5pKb+r4cdEBAAX19fnDp1CtnZ2eL2du3awcPDA0ePHtV6AwYHB8Pe3h4HDx7UqqF3797Izs7GlStXcOXKFQAv3qihoaFIS0tDTEyM2NbR0REhISFITk5GXFycuN3d3R3t27fHrVu3cOPGDXG7OTPl5eUhIuLFgnRXrlyRXCYAuHbtmuQyAYC3tzcyMjIkk8nKygpKpRLW1tY4f/68ZDJ1794d8fHxiI+Pl0ym0NBQpHnWxPn7KcDDx8DDI9LIVOQz4sqVK9xmqnr/HgwZrHPu5m1k5CrMkik3N9eACsuxuGVReXl5yMjIgJOTk1GDv4ujGdOkb/acRkREBPLz83Ht2jVs2rQJb731ljieCQDGjh2LrVu3
6v2F+Pj4oHnz5ti9e7feY+s70+Tt7Y20tDRx8Stz9uQVCgUOHTqEbt26iTc9lMJZmby8PBw7dkzMJYVMSqUSCoVCzOXg4CCJTADEXL179xbr4T1T0Vw9e/bUWviQ50waBw8e1Prc4D2TtbU1CgoKcPjwYTGXFDKp1Wrk5+eLnxs2NjYWlUkulyM3N7f0TCoVrCaOgJBe/HpYzM0dyv+uF8c0lZTJ0dERarXapJmysrLg5uZW6uKWJrlIam9vD3t7e1McqsyCg19cs+7Vqxf69euHxo0bo3Llyvjggw/E2oqb6Zefn19i3ba2trC1tdXZrvkiL0oul+sdFF/cdejithd3x2d92zXXdV+uRyaT6V3dtrjtxdVujkxFtxfNJZVMRf9fSpk0pJqpLFktPZPmS0Lf5xivmQBoDVEw5POQl0xFPw81z2UpmZ4/fw5HR0e9bV9WdPacZvA3AKj/vyMzPDwaew1c+kGz5IApMxX3urxMUuvG+/v7o1mzZti8ebO4zdPTEyqVSmfNqMLCQqSnp2stTcAbQRDg7u5ukbMqyoNy8YVy8YVy8UUqufamZmD4ldt4WKB9EuNBQSE3yw0AJjrTZEny8vK0LqkFBAQAeHGpr3fv3uL2CxcuQK1Wi/t5ZGVlhfbt25u7DJOjXHyhXHyhXHyx5FwGLzlQBFMqkRd7AULmU9hW84R3o9expYzLQnC35IC5KZVKZGTo9krPnTuHK1euaM2UCwkJgYuLC1atWqXVdtWqVXBwcEBoaGiF11tRVCoVrl+/bpHL65cH5eIL5eIL5eKLJecydMmBoj+VnZ1RNTAIqQ2awL51O1RycirzMbhbcqCirVixApmZmeLMtn379uH+/fsAgMmTJ4MxBm9vbwwZMgSNGjVCpUqVcOXKFYSFhcHZ2Rmff/65eCx7e3t89dVXmDRpEgYPHowePXogKioKmzZtwjfffAMXFxezZDQFtVqNGzduwN/fX++1XV5RLr5QLr5QLr5IMRfPmSyy07RkyRLcu3dP/PMff/yBP/74AwAwfPhweHl5Ydy4cYiIiMDOnTuRl5cHLy8vvP322/jss8907j03ceJEWFtbY+nSpdi7dy+8vb3x/fffY+rUqa8yFiGEEEI4ZlSnacOGDahWrRp69Ohh6noAAImJiaW2+eGHH8p0zPHjxxu1QjkhhBBCCGDkmKaxY8fi8OHDpq6FlJFMJoOPj4/eqac8o1x8oVx8oVx8kWIunjMZtbilj48P+vXrh+XLl1dETRYrKysLzs7OpS5+RQghhBB+GPr9blQ3r2/fvjh27JjOzXrJq6VSqRAbG2uRsyrKg3LxhXLxhXLxRYq5eM5kVKfpm2++QaVKlTBw4ED8888/pq6JGEitViMpKUlrKXkpoFx8oVx8oVx8kWIunjMZNRC8WbNmKCgoQFxcHA4fPgw7Ozt4eHjorJ0gCALu3LljkkIJIYQQQszJqE6TWq2GjY0NfHx8tLa/PDzKBPcCJoQQQgixCEZ1mgxZEoBUPJlMhvr163M5A6EklIsvlIsvlIsvUszFcyajZs/9W9HsOUIIIUR6KnT2XFFXr17FH3/8gY0bN5b3UKSMlEoloqOjoVQqzV2KSVEuvlAuvlAuvkgxF8+ZjO40nT9/HgEBAWjSpAkGDx6MUaNGiftOnToFBwcH7N271xQ1kmIwxpCamiq5sWOUiy+Uiy+Uiy9SzMVzJqM6Tf/88w9CQkKQkJCAjz76CL169dLaHxgYCDc3N+zYscMkRRJCCCGEmJtRnaa5c+cCAC5evIglS5agVatWWvsFQUC7du1w/vz58ldICCGEEGIBjOo0nTx5EoMGDUKdOnWKbePj44OUlBSjCyOlk8vlCAgIgFwuN3cpJkW5+EK5+EK5+CLFXDxnMmrJgezsbHh4eJTYJi8vj8sl0nkik8ng6+tr7jJMjnLxhXLxhXLxRYq5eM5k1Jkmb29vXLlypcQ2ly5dgr+/v1FFEcMolUqEh4dzOQOhJJSLL5SLL5SLL1LMxXMmozpNffr0wdGjR3H8+HG9+7dv344zZ86gf//+5amNlIIxhuzsbC5nIJSEcvGFcvGFcvFFirl4zmTU5blPPvkEO3fuRO/evTFy5Eg8evQIALBy5UrExMRg69at8PPzw7Rp00xaLCGEEEKIuRjVaXJ3d8fJkyfx7rvvYu3ateL2Dz74AADQpk0bbN26Fc7OzqapkhBCCCHEzMp9G5W4uDicOXMGT58+hZOTE9q0aaOzBIFUWNptVNRqNdLS0uDm5sblPXyKQ7n4Qrn4Qrn4IsVclpjJ0O93uvdcGVhap4kQQggh5ffK7j2Xnp6O8PBw/PnnnwgPD0d6enp5D0kMpFAocODAASgUCnOXYlKUiy+Uiy+Uiy9SzMVzJqPGNAFAYmIipk6digMHDmiNgBcEAX369MEPP/wAPz8/U9RISsDjlE1DUC6+UC6+UC6+SDEXr5mM6jTduXMHHTp0wJMnT1C3bl106NAB1apVw+PHjxEdHY29e/fizJkziI6ORu3atU1dMyGEEELIK2dUp2nWrFlITU3Fzz//jPHjx0MQBHEfYwyrV6/GxIkTMWvWLLppLyGEEEIkwaiB4FWrVkVQUBD+/PPPYtv069cPp06dQkZGRrkKtCSWNhBcs0CYo6OjVseVd5SLL5SLL5SLL1LMZYmZKnQguEqlQqNGjUps07hxY7r33Ctgb29v7hIqBOXiC+XiC+XiixRz8ZrJqE5T8+bN8c8//5TY5p9//kHLli2NKooYRqlU4uDBg9wOqCsO5eIL5eIL5eKLFHPxnMmoTtM333yDQ4cO4ddff9W7f/Xq1Thy5Ai+/vrrchVHCCGEEGIpjBoIfuLECQQHB+O9997D0qVLtWbPnT59Gjdv3kSPHj1w/PhxrZv6CoKAzz//3GTFE0IIIYS8KkZ1mr788kvx/2/cuIEbN27otDl8+DAOHz6stY06TYQQQgjhlVGz506ePGn0E3bu3Nnox5qbJc6eUyqVsLKyspgZCKZAufhCufhCufgixVyWmMnQ73ejzjTx3PGRmry8PDg6Opq7DJOjXHyhXHyhXHyRYi5eM1nG7YWJUZRKJSIiIricgVASysUXysUXysUXKebiORN1mgghhBBCDECdJkIIIYQQA1CniXNWVkYNS7N4lIsvlIsvlIsvUszFayajZs/9W1na7DlCCCGElF+F3nuOWAa1Wo0nT55ArVabuxSTolx8oVx8oVx8kWIunjNRp4ljKpUKMTExkrsxMuXiC+XiC+XiixRz8ZyJOk2EEEIIIQYwaCSWTCYzatVOQRC4XIeBEEIIIeRlBp1p6tSpk85PkyZNwBiDTCaDr68vWrduDV9fX8hkMjDG0KRJEwQGBpa5oJycHMydOxc9e/aEi4sLBEHAunXrtNqo1WqsW7cOffv2hbe3NypVqoTGjRvj66+/Rn5+vt7jrl27Fg0bNoSdnR3q1q2L5cuXl7k2SyMIAhwdHS1mGXpToVx8oVx8oVx8kWIunjMZNXvu/v376NChAwIDA7FgwQL4+PiI+5KSkjBnzhycPn0af/3
1F2rWrFmmYycmJqJWrVrw8fFB7dq1ERkZibCwMIwaNUpsk5OTA0dHR7Rt2xZ9+vSBh4cHYmJisH79enTq1Anh4eFaL8Yvv/yC999/H4MGDUKPHj0QFRWFjRs3YtGiRZg1a5bBtdHsOUIIIUR6DP5+Z0YYMmQIa9OmTYlt2rRpw4YOHVrmY+fn57OUlBTGGGPnz59nAFhYWJhWm4KCAnb69Gmdx86bN48BYMeOHRO35ebmMldXVxYaGqrV9p133mGVKlViT58+Nbi2Z8+eMQDs2bNnZUhUcVQqFUtMTGQqlcrcpZgU5eIL5eIL5eKLFHNZYiZDv9+NGgh+/PhxdOnSpcQ2ISEhOH78eJmPbWtri+rVq5fYxsbGBu3bt9fZPmDAAADAtWvXxG0RERFIT0/HxIkTtdpOmjQJz58/x4EDB8pco6VQqVSIi4vjcgZCSSgXXygXXygXX6SYi+dMRi3JmZ+fj5SUlBLbPHz4EHl5eUYVZaxHjx4BANzc3MRtsbGxAICWLVtqtW3RogVkMhliY2MxfPhwvccrKChAQUGB+OesrCwAgEKhgEKhAPBikLxcLodKpdJac0KzXalUghW5AiqXyyGTyYrdrjmuhmbV1JcH1FtZWYmPL/oYa2trqNVqrTejIAiwsrIqdntxtZsjU9E8mv9KIZNSqdTKJZVMmjwaUslUNJdardY6Ds+ZipJSJs3fp6K5pJKp6OcGZaq4TC9nK45RnaYWLVrg999/x/jx49GuXTud/dHR0di2bRvatm1rzOGN9t1338HJyQm9evUSt6WkpEAul8PDw0OrrY2NDVxdXfHw4cNij7dw4ULMmzdPZ/vRo0fh4OAAAPDx8UGzZs1w+fJlJCUliW3q16+PBg0a4Ny5c0hNTRW3BwQEwNfXF6dOnUJ2dra4vV27dvDw8MDRo0e13mjBwcGwt7fHwYMHtWro3bs3cnJyAADHjh0D8OKNGhoairS0NMTExIhtHR0dERISguTkZMTFxYnb3d3d0b59e9y6dQs3btwQt5szU15eHiIiIsRcUssEvDj7KbVMGlLKpPngT09Px/nz5yWTqXv37gD+97khhUyhoaFIT0/XyiWFTEX/Ph07dowyVWCm3NxcGMKogeB//fUXunTpApVKhTfeeAMdO3aEh4cHnjx5gqioKOzfvx9WVlY4ceIEOnToUNbDiy5cuIBWrVrpDATXZ8GCBfj000+xcuVKTJgwQdw+duxYbN26Ve8vxMfHB82bN8fu3bv1HlPfmSZvb2+kpaWJA8XM2ZNXKpU4e/YsWrRoIbaTwhmM/Px8XLx4UcwlhUxKpRJKpVLMZW9vL4lMmv9evHgRbdu2hUwmk0Qmzf/HxsaiZcuWWhNLeM6kqfPs2bNo3ry52Ib3TNbW1igsLMS5c+fEzw0pZFKr1SgoKBA/N6ytrSlTBWXKysqCm5tbxQwEZ4yx48ePs9q1azNBEJggCEwmk4n/X7t2bXb8+HFjDy0qbiD4y37//XcmCAIbO3aszr5JkyYxuVyu93Hu7u5lGqxuaQPBCSGEEFJ+hn6/G32b4S5duuD27dv466+/EB8fj2fPnsHZ2RlNmzZFx44dX9n6C8eOHcOIESMQGhqKn3/+WWe/p6cnVCoVnjx5onWJrrCwEOnp6fDy8noldVYElUqFW7duoW7dupDL5eYux2QoF18oF18oF1+kmIvnTOW6jYogCAgMDMQHH3yATz/9FB988AECAwNfWYfp7NmzGDBgAFq2bInt27eLpwSLCggIAPDiUl9RFy5cgFqtFvfzSK1W48aNG1ze9LAklIsvlIsvlIsvUszFc6Zy33vu6tWr+OOPP7Bx40ZT1GOwa9euITQ0FH5+fti/fz/s7e31tgsJCYGLiwtWrVqltX3VqlVwcHBAaGjoqyiXEEIIIZwz+vLc+fPnMX78eFy5ckXc9u677wIATp06hZ49e+L3339H3759y3zsFStWIDMzU5zZtm/fPty/fx8AMHnyZMhkMvTo0QMZGRmYMWOGzlpL/v7+4qw+e3t7fPXVV5g0aRIGDx4srgi+adMmfPPNN3BxcTEqPyGEEEL+XYzqNP3zzz8ICQmBTCbDRx99hOvXr+PQoUPi/sDAQLi5uWHHjh1GdZqWLFmCe/fuiX/+448/8McffwCAuKZScnIyAGD27Nk6jx85cqTWUggTJ06EtbU1li5dir1798Lb2xvff/89pk6dWubaLIlMJoOPjw9ksnKfMLQolIsvlIsvlIsvUszFcyajlhx48803ceTIEcTGxqJOnTqYN28e5s+frzVdcMiQIYiPj8f169dNWrA50b3nCCGEEOkx9PvdqG7eyZMnMWjQINSpU6fYNj4+PqWuGk7KR6VSITY2lsul6EtCufhCufhCufgixVw8ZzKq05Sdna2zwvbL8vLyuPyF8EStViMpKYnLGQgloVx8oVx8oVx8kWIunjMZ1Wny9vbWGgCuz6VLl+Dv729UUYQQQgghlsaoTlOfPn1w9OhRHD9+XO/+7du348yZM+jfv395aiOEEEIIsRhGzZ775JNPsHPnTvTu3RsjR47Eo0ePAAArV65ETEwMtm7dCj8/P0ybNs2kxRJtMpkM9evX53IGQkkoF18oF18oF1+kmIvnTEbNngOAu3fv4t1339W6o7lGmzZtxI6TlNDsOUIIIUR6KnT2HADUrl0bp0+fxqVLl7By5Up8/fXX+Omnn3D27FnExMRIrsNkiZRKJaKjo3XuJM07ysUXysUXysUXKebiOZPRK4JrBAQEcH3/Np4xxpCamgojTxZaLMrFF8rFF8rFFynm4jkTfxcUCSGEEELMwOgzTdnZ2Vi7di3i4+Px8OFDKBQKnTaCIODEiRPlKpAQQgghxBIY1Wk6f/48evXqhYyMjBJPrwmCYHRhpHRyuRwBAQGQy+XmLsWkKBdfKBdfKBdfpJiL50xGzZ5r3749zp07h4ULF+Ltt9+Gp6cnl+HLimbPEUIIIdJTobPnYmNjMXToUMyYMQM1a9b8V3SYLJFSqUR4eDiXMxBKQrn4Qrn4Qrn4IsVcPGcyqtPk4uICd3d3U9dCyogxhuzsbC5nIJSEcvGFcvGFcvFFirl4zmRUp6l///4IDw/n8mZ7hBBCCCHGMKrTtHDhQlhbW+Odd97BgwcPTF0TIYQQQojFMfo2KpcuXULXrl3x7NkzVK1aVe/AKUEQcOfOnXIXaSksbSC4Wq1GWloa3NzcuLyHT3EoF18oF18oF1+kmMsSMxn6/W5Up+nEiRN44403kJ+fD2tra3h4eMDKSv/qBQkJCWU9vMWytE4TIYQQQsqvQmfPzZo1C4wxbNu2Dfn5+UhOTkZCQoLeH1JxFAoFDhw4oHdhUZ5RLr5QLr5QLr5IMRfPmYxa3PLq1asYPnw4Bg8ebOp6SBnxOGXTEJSLL5SLL5SLL1LMxWsmo840ubu7w97e3tS1EEIIIYRYLKM6Te+88w4OHTqEvLw8U9dDCCGEEGKRjBoIXlhYiCFDhuDp06dYsGABmjZtisqVK1dEfRbF0gaCaxYIc3R0lNR9/igXXygXXygXX6
SYyxIzGfr9btSYJs2lOcYYOnXqVGw7QRC4vW7JC6leJqVcfKFcfKFcfJFiLl4zGXV5LjAwEJ06dULnzp3RqVOnYn8CAwNNXS8pQqlU4uDBg5LrmFIuvlAuvlAuvkgxF8+ZjDrTFBkZaeIyCCGEEEIsm2UsxUkIIYQQYuGo00QIIYQQYgCjZs+FhIQYdnBBwIkTJ8pclKWyxNlzSqUSVlZWFjMDwRQoF18oF18oF1+kmMsSM1Xo7LnSxjQJggDGmMX8MqQsLy8Pjo6O5i7D5CgXXygXXygXX6SYi9dMRl2eU6vVen8yMzMRHh6ONm3a4M0330RhYaGp6yVFKJVKREREcDkDoSSUiy+Uiy+Uiy9SzMVzJpOOaXJyckJQUBCOHDmCc+fO4ZtvvjHl4QkhhBBCzKZCBoI7OjqiV69eCAsLq4jDE0IIIYS8chU2e04mkyElJaWiDk/+n5WVUcPSLB7l4gvl4gvl4osUc/GayajZc6W5e/cu2rZtCxcXF1y/ft3UhzcbS5s9RwghhJDyq9DZc2PGjNG7XalU4sGDB/jrr7+gUCgwf/58Yw5PDKRWq5GWlgY3NzfIZNJZcoty8YVy8YVy8UWKuXjOZFSnad26dSXur1+/PqZPn45x48YZc3hiIJVKhZiYGPTu3Zu7N15JpJiLqVRQxF3A3fATcA7pApuAlhDkcnOXZRJSfL0AysUbysUPnjMZ1WlKSEjQu10mk6FKlSpcrr1AiKEYY8jNzTX8ATGnwNb8F0J6KpoAUB7ZA6WrOzB+EtCuk8GHcXBwoLXPCCHEjIzqNPn6+pq6DkK4kZubi8qVKxvUtq97VWxqUgcAtDo86rQnwMK5GH7lNvamZhh0rJycHFSqVKnsBRNCCDEJk54Xy8rKwrFjxxAVFYUKGF9OXiIIAhwdHSV39kEquWQAvqvn8+L/X8qi+fO39Xy4vwGkVF6vl1EuvlAufvCcyajZc2vWrMGmTZuwe/duVK1aFQAQHx+PXr164fHjxwCAdu3a4ejRo3BwcDBtxWZEs+cIUIbLc1figM+mld7u62VAk4BSm9HlOUIIqRiGfr8b9Y/cjRs3oqCgQOwwAcD06dPx5MkTjB49Gr1790ZMTAxWrVpV5mPn5ORg7ty56NmzJ1xcXCAIgt6B5+fOncPEiRPRokULWFtbl/plsnbtWjRs2BB2dnaoW7culi9fXubaLI1arca9e/egVqvNXYpJSSZXRrpp21koybxeL6FcfKFc/OA5k1Gdpps3b6Jp06bin9PT0xEREYFx48bh119/xb59+9CqVSts3ry5zMdOS0vD/Pnzce3aNa3neNnBgwfx66+/QhAE1K5du8Rj/vLLLxg3bhwaNWqE5cuXo127dpgyZQq+/fbbMtdnSVQqFeLi4qBSqcxdiklZei7NmKbSfnoNG27Q8XoNG27Q8co0+PwVsvTXy1iUiy+Uix88ZzKq05SZmQl3d3fxz1FRUQCAgQMHits6duyIxMTEMh/b09MTKSkpuHfvHhYvXlxsuwkTJuDZs2e4cOECunXrVmy7vLw8fPrppwgNDcXOnTsxfvx4bNiwAe+88w6++uorZGQYNgiXkLI6nZmN+/kFUBdzBVzNGJLzC3A6M/sVV0YIIcQYRnWaXF1dtW6RcuLECcjlcnTo0EHcxhiDQqEo87FtbW1RvXr1UttVq1YN9vb2pbaLiIhAeno6Jk6cqLV90qRJeP78OQ4cOFDmGsm/m4ODA3Jyckr9ycrJQc0vF0ImCNDXbZIJAry/XIgsA46Vk5MjqfGBhBDCI6OWHHj99dexZ88efPTRR7Czs8OWLVvQoUMHrenQiYmJ8PT0NFmhxoqNjQUAtGzZUmt7ixYtIJPJEBsbi+HD9V9GKSgoQEFBgfjnrKwsAIBCoRA7hDKZDHK5HCqVSuv6rGa7UqnUmkkol8shk8mK3f5yR1Nzfx6lUql3u6urq9Y+a2trqNVqrdOegiDAysqq2O3F1W6uTEqlUiuXJWaysbExLFOXnlDa2KJw5TIgLfV/dbpXg/WED4F2gRaTyZjXSfNfV1dXCIJgca+TsZk0/+/u7q7zD0CeM2nqdHNz09rOeyZra2swxrQ+N6SQSa1Wa30eUqaKy2ToSR6jOk0zZ85E165dtcYcTZv2v1lCarUaf/31F0JCQow5vEmlpKRALpfDw8NDa7uNjQ1cXV3x8OHDYh+7cOFCzJs3T2d70VmBPj4+aNasGS5fvoykpCSxTf369dGgQQOcO3cOqan/+7IMCAiAr68vTp06hezs/12WadeuHTw8PHD06FGtN1pwcDDs7e1x8OBBrRp69+6NvLw8pKen4+jRowBevFFDQ0ORlpaGmJgYsa2joyNCQkKQnJyMuLg4cbu7uzvat2+PW7du4caNG+J2c2eKiIgQf8+SyBTQEhFDxqLqw2TY5OZA5eiM9uPfR2p6OmKKtOcqU5HXCXjx3rOyssKTJ0/4fZ30ZAoNDZVkprp164qfG1LJlJmZqfV5KIVMRT/3jh49SpkqMJOhY0aNvmHvgQMHEBYWBgAYOnQo3nzzTXFfVFQUpkyZgk8++QSDBw825vAAgAsXLqBVq1YICwvDqFGjim33wQcf4L///a/etaHGjh2LrVu36v2F+Pj4oHnz5ti9e7fe4+o70+Tt7Y20tDRxSqI5e/IqlQo3btyAv78/5P9/Sw5LPCtT1n+dFBQU4M6dO2IuKWRSKpVQqVRiLjs7O0lkAiDmatCgAQRBkEQmTa7ExET4+/trteU5k+Z5b968iVq1aomfG7xnsra2hkKhwM2bN8XPDSlkUqvVKCwsFD83NP84oUymz5SVlQU3N7eKuWEvAISGhiI0NFTvvsDAQPGymLnZ29ujsLBQ7778/PwSx0XZ2trC1tZWZ7u1tTWsra21tsnlcvEDqCjNm8fQ7S8ft6TtjDHcvn0b9erV09ovk8n03s+nuO3F1W6OTJrnfTkX75k02zW5Sqqdt0wAtF4vqWQCIP6jRF97XjNpOhd16tTR2cdrJo2yfB7ykEnTEdDk0jwXZTJ9puIy6NRvUCuOeXp6QqVS4cmTJ1rbCwsLkZ6eDi8vLzNVRgghhBCeGH2mCXixwOT58+eRmZmpd70FQRDw+eefl+cpyi0gIADAi0t9vXv3FrdfuHABarVa3E8IIYQQUhKjOk1Pnz5F//79cfr06RLvMWcJnaaQkBC4uLhg1apVWp2mVatWwcHBodhLjDyQyWTw8fHRe0qTZ5SLL5SLL5SLL1LMxXMmozpN06ZNw19//YWgoCCMHDkSNWvWLPZ6oTFWrFiBzMxMcWbbvn37cP/+fQDA5MmT4ezsjHv37mHjxo0AXpw1AoCvv/4aAODr64t3330XwIsxTV999RUmTZqEwYMHo0ePHoiKisKmTZvwzTffwMXFxWR1v2pyuRzNmjUzdxkmR7n4Qrn4Qrn4IsVcXGdiRnB1dWVt2rRharXamIeXytfXlwHQ+5OQkMAYYywiIqLYNp07d9Y55urVq1n9+vWZjY0N8/f3Z99//32Z63/27BkDwJ49e2aClOWnVCrZpUuXmFKpN
HcpJkW5+EK5+EK5+CLFXJaYydDvd6POjeXl5aFTp04Vdsf1xMREMMb0/vj5+QEAgoKCim0TGRmpc8zx48fj+vXrKCgowO3bt/Hhhx9yf8d4tVqNpKQkLm96WBLKxRfKxRfKxRcp5uI5k1GdpoCAAKPuK0cIIYQQwiujOk1z587F3r17cebMGVPXQwghhBBikYwavf3o0SOEhoaic+fOeOedd9C8efNiV9AcMWJEuQokxZPJZKhfvz6XMxBKQrn4Qrn4Qrn4IsVcPGcy6jYqMpkMgiBoLTfw8vggxpjOLRV4l5WVBWdn51KXWSeEEEIIPwz9fjfqTJPmnnPEvJRKJc6dO4fWrVubdMkHc6NcfKFcfKFcfJFiLp4zGVXtyJEjTV0HMQJjDKmpqSUuMMojysUXysUXysUXKebiORN/FxQJIYQQQsygXOfFEhMTsXnzZsTFxSErKwtOTk4ICAjAO++8I66nRAghhBAiBUYNBAeAH3/8ETNnzoRSqdQ5xWZtbY3vvvsOU6dONUmRlsLSBoKr1WokJyfD29uby1kIxaFcfKFcfKFcfJFiLkvMZOj3u1Gdpv3796Nv375wc3PDRx99hODgYHh6euLRo0eIiIjAsmXLkJ6ejr1793J9Q9yXWVqniRBCCCHlZ+j3u1FdvGXLlsHFxQWXLl3CnDlz0LZtW/j6+qJNmzaYPXs2Ll68iKpVq2LZsmVGByClUyqVCA8Ph1KpNHcpJkW5+EK5+EK5+CLFXDxnMqrTdOnSJQwZMgQ1a9bUu9/b2xtvvfUWLl68WK7iSMkYY8jOzuZyBkJJKBdfKBdfKBdfpJiL50xGdZoKCwtRqVKlEttUrlwZhYWFRhVFCCGEEGJpjOo01atXD/v27Sv21JpSqcT+/ftRr169chVHCCGEEGIpjOo0jRgxAjdu3ECPHj10LsFduHABvXr1wo0bN2gRzAoml8vRrl07yOVyc5diUpSLL5SLL5SLL1LMxXMmo2bPqVQqDBo0CHv37oUgCHBwcICHhweePHmC3NxcMMbQr18/7Nq1y2KmE5oCzZ4jhBBCpKdCZ8/J5XLs3r0b69atQ1BQEGxsbJCUlAQbGxsEBwdj/fr1+PPPPyXVYbJECoUCBw4cgEKhMHcpJkW5+EK5+EK5+CLFXDxnKteK4CNGjMCIESNMVQsxAo9TNg1BufhCufhCufgixVy8ZqJTQYQQQgghBjCq07R//34MHDgQDx8+1Lv/4cOHGDhwIA4dOlSu4gghhBBCLIVRA8F79eqFhw8fIj4+vtg2zZo1Q40aNbB///5yFWhJLG0guGaBMEdHRwiCYO5yTIZy8YVy8YVy8UWKuSwxU4UOBI+Pj0ebNm1KbNOmTRvExcUZc3hSBvb29uYuoUJQLr5QLr5QLr5IMRevmYzqND19+hQeHh4ltnFzc0NaWppRRRHDKJVKHDx4kNsBdcWhXHyhXHyhXHyRYi6eMxk1e87d3R03btwosc2NGzfg4uJiVFGEEEKIVDDGkJubW/bHKZV4fv4MHGPP47mrM6xfbw6UYUFIBwcHi7n8JRVGdZo6deqEXbt24fLly3j99dd19sfHx2Pv3r0YOHBguQskhBBCeJabm4vKlSuX6TF93aviu3o+qGlnizYA8Ncx3M8vwMybSdibmmHQMXJyckq9TywpG6Muz82aNQsA0LFjR8yfPx8xMTFISkpCTEwM5s2bh8DAQMhkMsyZM8ekxRJCCCFS19e9KjY1qQMvWxut7V62NtjUpA76ulc1U2XEqNlzALBr1y6MHDkSeXl5WtsZY6hcuTI2bNiA/v37m6JGi2GJs+eUSiWsrKwkdQqWcvGFcvGFcr16Zbo8p1IB44cB6anFt3FzB1ZvKfVSnaVenrPE18rQ73ejVwQfNGgQAgMDsW7dOpw/fx7Pnj1DlSpV0Lp1a4wcORLu7u7GHpqUQV5eHhwdHc1dhslRLr5QLr5QrldLEASDL5Op4i4iv6QOEwCkpcLu7i3IA1qYoDrzsNTXqjTlWhHcw8MDM2fOxI4dO3D06FFs374dH3/8MXWYXhGlUomIiAguZyCUhHLxhXLxhXJZNvbUsFnnhrazRDy/VnQbFUIIIcRCCC5uJm1HTKtcN+wlhBBCSMnKNKapdl3A1b3UMU35tesCz5+XeChLHdPEM+o0cc7KSpovIeXiC+XiC+V6tcq65IBm9hwAyIp0etT/P29reHg09jo7l3ocS15ywFJfq9IYPXvu38jSZs8RQgixfM+fPy/XOk0ayfkFmEXrNFWICp89R8xPrVYjLS0Nbm5ukMmkMzyNcvGFcvGFcr16Dg4OyMnJKfPjmFKJ3Ivn8Px+Elzq1IN346bYUsYVwS2RJb9WpeGrWqJFpVIhJiYGKpXK3KWYFOXiC+XiC+V69TRLDpT1p7KzM6oEBiHe2Q32rdqikpNTmR5vqeOZLPm1Ko1RnaaHDx+aug5CCCGEEItmVKfJz88P/fr1w/79+6FWq01dEyGEEEKIxTGq09S2bVvs27cP/fr1g4+PD7744gskJiaauDRSGkEQ4OjoaLGnYI1FufhCufjBVCqwy7HwS7oDdjkWjMPLI8WR4usFSDMXz5mMnj138+ZNrFmzBhs3bsSTJ08gk8nQtWtXjB8/Hv369eN2OmFJaPYcIcQSlGndH42YU2Br/guh6Po/ru7A+ElAu04GHYLW/SFSZej3e7mXHFAqldizZw9+/fVXHDt2DIwxuLm5YdSoURg7dizq1atXnsNbFEvrNKnVaiQnJ8Pb25u7GQgloVx8oVyvXlmnsJe67s+V2wZNY7fkKeyW/HqVhxRzWWImQ7/fy12tlZUVBg0ahEOHDiExMRFz586FTCbDkiVL0LBhQwQHB2P79u2g5aBMT6VSIS4ujssZCCWhXHyhXJZNBuC7ej4v/v+ls0SaP39bz4f7qdRSeb1eJsVcPGcy2d8TtVqNixcv4vz580hNTQVjDN7e3jh9+jTefvttNG3aFLdu3TLoWDk5OZg7dy569uwJFxcXCIKAdevW6W177do19OzZE5UrV4aLiwveffddpKbqLj+vVqvx3XffoVatWrCzs8Prr7+OrVu3licyIYSYhb29PR4/fmzYT8Rx1LSz1ekwacgEAd52tngccbzUY9nb27/ipIRYlnIPPLp79y5+/fVXrF+/Ho8ePRLPPL3//vsIDg7Go0eP8P333+P777/HhAkTcPz48VKPmZaWhvnz58PHxwdNmzZFZGSk3nb3799Hp06d4OzsjAULFiAnJwdLlizBlStXcO7cOdjY2IhtP/30UyxatAjjx49Hq1atsGfPHgwbNgyCIGDo0KHl/TUQQsgrk5eXh2rVqhnUdnA1F4Q1rlNqu4lD38KOx09LbGPJl+cIeRWM6jQpFArs2rULa9aswcmTJ6FWq1GrVi0sWLAAo0ePhoeHh9i2evXq+Pbbb5GVlYUNGzYYdHxPT0+kpKSgevXquHDhAlq1aqW33YIFC/D8+XNcvHgRPj4vTj+3bt0a3bp1w7p16/Cf//wHAPDgwQMsXboUkyZNwooVKwAA48aNQ+fOnTFj
xgwMHjwY8jKssmopBEGAu7u75AZmUi6+UC7L9qhAYdJ2lkoqr9fLpJiL50xGdZq8vLzw9OlTyOVy9OvXD++99x66d+9e4mN8fX2Rl5dn0PFtbW1RvXr1Utvt2rULffr0ETtMANC1a1fUq1cP27dvFztNe/bsgUKhwMSJE8V2giBgwoQJGDZsGGJiYtCxY0eDarMkVlZWaN++vbnLMDnKxRfK9eqV6bYcKhUwfhiQrjtsQeTmjkP3jgGl/OPRUm/LAVj261UeUszFcyajOk0ODg6YOnUqxo4dC09PT4MeM3HiRLz99tvGPJ1eDx48wJMnT9CyZUudfa1bt8bBgwfFP8fGxqJSpUpo2LChTjvNfn2dpoKCAhQUFIh/zsrKAvDiTJtC8eJfZTKZDHK5HCqVSmuhT812pVKpNQheLpdDJpMVu11zXA3N0g1KpVJnu0qlwo0bN+Dv7y+eKbO2toZardYaYCcIAqysrIrdXlzt5sgEvPi937lzR8wlhUxKpRIqlUrMZWdnJ4lMAMRcDRo0gCAIksikyZWYmAh/f3+ttpaSSTP8wJBM6gkfQvn1pyiO1fsfwtbJqdRMmuewpNcJePG5p1AocPPmTfFzw1Jep/JkUqvVKCwsFD83rKysKFMFZXo5W3GM6jQlJiaW+bSak5OTSafpp6SkAIDeTpunpyeePn2KgoIC2NraIiUlBdWqVdOpWfPY4m4Ls3DhQsybN09n+9GjR8V/cfn4+KBZs2a4fPkykpKSxDb169dHgwYNcO7cOa2B6QEBAfD19cWpU6eQnZ0tbm/Xrh08PDxw9OhRrTdacHAw7O3ttTqBANC7d29kZ2fj9u3buH37NoAXb9TQ0FCkpaUhJiZGbOvo6IiQkBAkJycjLi5O3O7u7o727dvj1q1buHHjhrjdnJny8vIQEREBALh9+7bkMgEv/v5ILRMA1KtXDxkZGZLJZGVlBaVSCScnJ5w/f577TO69BqJ+1HHY5WSJbfIrO+FGYFfkq2UIAbjLBPzvcy81NVXr85DX16lopqKfEbdv36ZMFZjJ0HXPyr1OU0XTjGkKCwvDqFGjxO1RUVHo1KkTtm3bhrfeekvrMV988QW++uorZGRkoEqVKujSpQtSUlJw9epVrXZqtRpyuRxTp07FDz/8oPPc+s40eXt7Iy0tTewAmrMnr1AocOjQIXTr1g3W1tYA+D/TxFQq5F08hyunItGkUxCsmzaHjUTOyigUChw7dgzdunWDg4ODJDIBEHP17t1brIf3TEVz9ezZU2stGZ4zMZUK7J94xIafEP9+CRI5K1NQUIDDhw+Ln4dSyKRWq5Gfny9+btjY2FCmCsqUlZUFNze3UtdpMupM05gxY0ptI5PJ4OTkhPr166NPnz6oUaOGMU9VLM3U16KdGo38/HytNvb29ga1e5mtrS1sbW11tltbW4udFA25XK53MHlxK6MX3V7S6r6FhYV6txcUFODZs2fIzMxEQUGB+GYoqf3LNGfLiqu9PJmAF7meP3+u0664GgtPHhdXLG4CAEf2QOHqDkUxKxbry1S07orIpPHy61+W7Zr/l8lkehd2K267JWfSkGqmsmS16EzW1lC83hyP7z9Ci+atdB7DZab/p6n75c9n3jMV/Qex5rkok+kzFZdB5zgGtXrJunXrxEtd+k5UCYKgtX3y5Mn44osv8NlnnxnzdHppLq1pLtMVlZKSAhcXF7HD4+npiYiICDDGtC7RaR7r5eVlsrqMkZubW6bVfYEXC2x1qOKI6rbWWPvhZJzOzEZZb51c0dOHy5Kr6IrFRV8jddoTYOFcg1csBix3WrRMJoOPj4/FrIBrKpSLL5SLL1LMxXMmoyq+c+cO+vTpAw8PDyxYsAAnT57E9evXcfLkSSxYsADVqlVD3759cfbsWaxevRpeXl6YO3cutm3bZrLCa9SoAXd3d1y4cEFn37lz5xAQECD+OSAgALm5ubh27ZpWu7Nnz4r7edLXvSqudmiKQy0aIqxxHRxq0RBXOzRFX/eq5i7NKP+WFYvlcjmaNWum919HPKNcfKFcfJFiLp4zGfU9tG3bNpw9exZxcXGYPXs2AgMDUa9ePQQGBmL27Nm4dOkSzpw5g4iICIwbNw6nT59G5cqVsXLlSpMWP2jQIOzfvx/JycnithMnTuDmzZsYPHiwuK1fv36wtrbWen7GGH7++WfUqFHD7FMfNdOHDfo5dhBbXq+LGnbalw1r2tliy+t1kXPsoMHHqujpw4bmyor5y6AVi7Ni/rKIXMZSqVSIjY3l8tYBJaFcfKFcfJFiLp4zGXV5bu3atXjrrbeKXZG2evXqGDx4MNasWYOZM2eiRo0a6NOnDw4cOGDwc6xYsQKZmZnizLZ9+/bh/v37AF5c7nN2dsYnn3yCHTt2IDg4GFOnTkVOTg4WL16MJk2aYPTo0eKxatasiQ8//BCLFy+GQqFAq1atsHv3bkRFRWHz5s1m7+0KgmDQ5SSmUiHv15VgAIqbuyisXQX74O4QLKAHb2guZW4O9I9O0mabmwMrC7zsZii1Wo2kpCQ0btzY7O85U6JcfKFcfJFiLp4zGdVpun//vt4B0kXZ2dmJnRzgxTRBzcBrQyxZsgT37t0T//zHH3/gjz/+AAAMHz4czs7O8Pb2xsmTJzFt2jTMnj0bNjY2CA0NxdKlS3XqW7RoEapWrYpffvkF69atQ926dbFp0yYMGzbM4JrMTX0lDiztSYltWOpjqK/EQR7Q4hVVVX6Ci5tJ2xFCCCEVwahOU40aNbB792589dVXsLOz09mfn5+P3bt3a82Ye/LkCapWNXzMTWJiokHtGjVqhCNHjpTaTiaTYc6cOZgzZ47BNVga9jTNpO0qWkmzArXUrgu4upe6YnF+7bqAntl4L3NwcOByeX5CCCGWzahO09ixY/Hpp5+iY8eO+OKLL9ChQwe4uroiPT0dp0+fxvz583H37l189dVX4mOioqLQtGlTkxUuJQZ3LhwMm4lW4FAZBRbQuTB29lzRsU3q/5+FOTw8GnudnQ06liXPnqtfvz6XM0ZKQrn4Qrn4IsVcPGcyanFLlUqF0aNHY9OmTeKXrkwmExeaYoxh2LBh2LBhA2QyGR4/foxFixahZ8+e6NGjh2kTvEJZWVlwdnYudfGrsnr+/LlBnQsZgKsdmsLL1kbvoGk1Y3hQUIhGp+MNWn6gojsXhubS6OteFd/V80HNIoPck/MLMOtmksHLDQCW22kihBBimQz9fi/XiuDh4eHYuHEjLl++jKysLDg5OaFp06Z455130KVLF2MPa7HM3WkCDDgjY0HrGRl8Bq3oY5RKPL9wFvdiL6F+23awer1ZqTcRfZmlXp5TKpU4d+4cWrduXexCazyiXHyhXHyRYi5LzGTo97tR1Z46dQpOTk4ICQlBSEiI0UWSF8p0x3IAiDkFtua/WmOAZO4ewLhJ2KJn5eySnrciGTp77mW2nYJxJicPzdq0N3iVVh4wxpCamqp3QVieUS6+UC6+SDEXz5mM6jQFBwfjvffeM/m6S/9WZe5cdO0FFtwdhXEXEBt+As1CusAmoKVFLDN
ACCGESJVRnSYPDw+9s+bIqyPI5ZD9/z2kZK83pw4TIYQQUsGMGrrerVs3REZGcnlqTUrkcjkCAgK4WxysNJSLL5SLL5SLL1LMxXMmowaCP3z4EO3atUP37t3x7bffwsXFpSJqszgVNRCcEEIIIeZj6Pe7UWeahg8fjipVquC3335DjRo18NprryE4OFgcGK75keIMOkuiVCoRHh4OpVJp7lJMinLxhXLxhXLxRYq5eM5k1JimyMhI8f8LCgpw/fp1XL9+XaedJU77lhLGGLKzsyV3mZRy8YVy8YVy8UWKuXjOZFSnSbOIJSGEEELIvwV/a5gTQgghhJhBuVYEB16sKn3z5k08f/4cgYGBpqrLIlnaQHC1Wo20tDS4ublxeQ+f4lAuvlAuvlAuvkgxlyVmqtCB4ACQmJiIfv36oWrVqmjVqhWCg4PFfadPn8Zrr72mNfaJmJ5MJoOHh4fFvOlMhXLxhXLxhXLxRYq5eM5kVMVJSUlo27YtDh48iH79+qFdu3ZaA7ratGmDtLQ0bN261WSFEl0KhQIHDhyAQqEwdykmRbn4Qrn4Qrn4IsVcPGcyqtM0d+5cZGRk4OTJk9i5cye6deumtd/KygqBgYE4ffq0SYokxeNxyqYhKBdfKBdfKBdfpJiL10xGdZqOHDmCAQMGoH379sW28fX1xYMHD4wujBBCCCHEkhjVaXr69Cn8/PxKbMMYQ0FBgTGHJ4QQQgixOEZ1mqpVq4Zbt26V2ObKlSvw8fExqihiGCsrKwQHB8PKyqjltiwW5eIL5eIL5eKLFHPxnMnoG/bu378fly9f1rs/KioK4eHh6N27d7mKI6Wzt7c3dwkVgnLxhXLxhXLxRYq5eM1kVKfps88+g729PTp16oRvvvkGt2/fBgAcOnQIn3/+OXr27Ak3NzfMmDHDpMUSbUqlEgcPHuR2QF1xKBdfKBdfKBdfpJiL50xGnRvz8/PDkSNHMHToUHz++ecQBAGMMfTp0weMMfj4+GDnzp3w9PQ0db2EEEIIIWZh9AXFNm3a4NatW9i3bx/Onj2Lp0+fwsnJCW3atEG/fv1gY2NjyjoJIYQQQsyqXKOwrKysMGDAAAwYMMBU9RBCzIQxhtzc3DI/5tmzZ8jMzEROTk6Z/7Hk4OAAQRDK9BhCCDGXct977t/E0u49xxiDUqmElZWVpL54KJd5PH/+HJUrV36lz5mTk4NKlSq90uc0lKW/XsaiXHyRYi5LzGTo97vRZ5oKCwuxe/dunD9/HpmZmVCpVDptBEHA2rVrjX0KYoC8vDw4OjqauwyTo1zEEkj19aJcfJFiLl4zGdVpunfvHrp164Y7d+6gpBNV1GmqWEqlEhEREejduzesra3NXY7JUC7zcHBwQE5OTpke8/z5c1SrVg0AcP/+fVSpUqXMz2mpLP31Mhbl4osUc/GcyahO00cffYTbt2/j3XffxZgxY1CzZk0uF6kihPyPIAhlv1SmUiGwiiOq21qjcsJtOLTrCEEur5gCCSHEzIzq6YSHh6NLly5Yv369qeshhHBCGRUBrFiKQy0avtjw5UzkuXnAZtI0WAUGm7c4QgipAEYtbqlWq9GsWTNT10KMINUzfJTLsimjIlAwbzaQnqq1naU9QcG82S86VBIgldfrZZSLL1LMxWsmo2bPdevWDXZ2dti3b19F1GSxLG32HCHmwFQq5L3THyztSbFtBPdqsN/0J12qI4RwwdDvd6PONC1atAjh4eHYuXOn0QWS8lOr1Xjy5AnUarW5SzEpymXZ1FfiSuwwAQBLfQz1lbhXU1AFkcrr9TLKxRcp5uI5k1Hnxw4cOIDg4GAMGTIEnTt3RvPmzfX2zARBwOeff17uIol+KpUKMTEx6N27N2Qyo/q/FolyWTb2NM2k7SyVVF6vl1EuvkgxF8+ZjOo0ffnll+L/R0ZGIjIyUm876jQRIj2Ci5tJ2xFCCC+M6jRFREhjkCchpOxkTQIguHmUOqZJ1iTg1RVFCCGvgFGdps6dO5u6DmIEQRDg6OhoMcvQmwrlMo8y3Xtu3ERg0ZdgAPSlYWMnIDc/v9TDWPK95yz99TIW5eKLFHPxnInuPVcGNHuOSFlZ7z3X170qvqvng5p2tuK25PwCzLqZhL2pGQYdw5LvPUcI+feo0NlzwItl0L///nu0bt0aTk5OWmsuxMXFYeLEibh586axhycGUKvVuHfvHpczEEpCufiwNzUDr52OR6+L1zD679vodfEaGp2ON7jDZOmk9nppUC6+SDEXz5mMujyXl5eH7t27Izo6Gm5ubnBycsLz58/F/bVq1UJYWBhcXFzw9ddfm6xYok2lUiEuLg5eXl7czUAoCeUyD2PuPccYw7Nnz3D8+HH07dsXNjY2ZX5OS2Xpr5exKBdfpJiL50xGVbtgwQKcPn0aCxcuxKNHjzBu3Dit/c7OzujcuTOOHDlikiIJIRVPc++5svxUrlwZHh4eqFKlCipXrlzmx/M4poEQ8u9lVKdp27ZtCA4OxsyZMyEIgt4Pvtq1ayMpKancBRJCCCGEWAKjOk1JSUlo2bJliW0cHR3x7Nkzo4oy1MWLF9GzZ084OTnB0dER3bt3R1xcnN620dHR6NixIxwcHFC9enVMmTKlzJciLI0gCHB3d5fcv9YpF18oF18oF1+kmIvnTEaNaXJ0dMSTJyXfRuHOnTtwd3c3qihDXLp0CR07doS3tzfmzp0LtVqNlStXonPnzjh37hzq168vto2Li0OXLl3QsGFDLFu2DPfv38eSJUtw69YtHDp0qMJqrGhWVlZo3769ucswOcrFF8rFF8rFFynm4jmTUWea2rZti3379iEzM1Pv/uTkZBw8eBCdOnUqT20l+vzzz2Fvb4+YmBhMnz4dM2bMQHR0NNRqNT755BOttp988gmqVq2KyMhIvP/++/j666+xYsUKHD58GEePHq2wGiuaSqXC9evXoVKpzF2KSVEuvlAuvlAuvkgxF8+ZjOo0zZgxAxkZGejSpQtOnz4NpVIJAMjNzcWJEyfQo0cPKJVKTJs2zaTFFhUVFYWuXbvC1dVV3Obp6YnOnTtj//794qW3rKwsHDt2DMOHD9dae2HEiBGoXLkytm/fXmE1VjS1Wo0bN25wOW2zJJSLL5SLL5SLL1LMxXMmoy7PderUCStWrMDUqVO1ziY5OjoCAORyOVauXIkWLVqYpko9CgoKYG9vr7PdwcEBhYWF+Pvvv9G2bVtcuXIFSqVSZwyWjY0NAgICEBsbW+JzFBQUiH/OysoCACgUCigUCgCATCaDXC6HSqXSegNotiuVShRdP1Qul0MmkxW7XXNcDc36V5qOadHtmscXfYy1tTXUarVWD14QBFhZWRW7vbjazZGpaB7Nf6WQSalUauWSSiZNHg2pZCqaS61Wax2H50xFSSmT5u9T0VxSyVT0c4MyVVyml7MVx6hOEwBMmDABQUFB+Pnnn3H27Fk8ffoUTk5OaNOmDSZOnIhGjRoZe2iD1K9fH2fOnIFKpYJcLgcAFBYW4uzZswCABw8eAABSUlIAvDgL9TJPT09ERUUV+xwLFy7EvHnzdL
YfPXpUXF/Gx8cHzZo1w+XLl7VmC9avXx8NGjTAuXPnkJqaKm4PCAiAr68vTp06hezsbHF7u3bt4OHhgaNHj2q90YKDg2Fvb4+DBw9q1dC7d2/xbNqxY8cAvHijhoaGIi0tDTExMWJbR0dHhISEIDk5WWugvLu7O9q3b49bt27hxo0b4nZzZsrLyxPvbXjs2DHJZQJe3LtRapk0pJRJ88Gfnp6O8+fPSyZT9+7dAfzvc0MKmUJDQ5Genq6VSwqZiv59OnbsGGWqwEyG3kKK29uo/Pzzz5gwYQJGjhyJmTNnQq1W4+uvv8Yff/wBhUKBjRs3Yvjw4di4cSNGjBiBs2fPonXr1lrHGDFiBPbu3Vvs2Cx9Z5q8vb2RlpYmXuozZ09epVIhPj4ejRo1EjuOUjiDUVBQgH/++UfMJYVMSqUSKpVKzGVnZyeJTADEXAEBARAEQRKZNLmuXbuGxo0ba7XlOZPmeePj4/Haa6+Jnxu8Z7K2toZCocDly5fFzw0pZFKr1SgsLBQ/N6ysrChTBWXKysqCm5tbqbdR4bbTBACffvopFi9eLIZu2bIlevTogW+++QZ//vkn+vfvj507d2Lw4ME4deoUAgMDtR7/1ltvISoqSjwbVRq69xwhhBAiPRV+7zlL8M033+Dx48eIiorC5cuXcf78ebHnWa9ePQD/uyynr2OUkpICLy+vV1ewialUKsTGxnI5A6EklIsvlIsvlIsvUszFcyauO00AULVqVXTs2BFNmjQBABw/fhw1a9ZEgwYNAACNGzeGlZUVLly4oPW4wsJCxMXFISAg4FWXbDJqtRpJSUlczkAoCeXiC+XiC+XiixRz8ZyJ+05TUdu2bcP58+fx4YcfijcBdHZ2RteuXbFp0yatQWAbN25ETk4OBg8ebK5yCSGEEMIRo2fPmdupU6cwf/58dO/eHa6urjhz5gzCwsLQs2dPTJ06VavtN998g/bt26Nz5874z3/+g/v372Pp0qXo3r07evbsafBzaoZ/aZYeMDeFQoHc3FxkZWXB2tra3OWYDOXiC+XiC+XiixRzWWImzfd6qcO8Gadu377Nunfvztzc3JitrS1r0KABW7hwISsoKNDbPioqirVv357Z2dkxd3d3NmnSJJaVlVWm50xOTmYA6Id+6Id+6Id+6EeCP8nJySX2A7iePfeqqdVqPHz4EI6OjhZxo0HNEgjJycmSms1HufhCufhCufgixVyWmIkxhuzsbHh5eYnDe/Th9vKcOchkMtSsWdPcZehwcnKymDeeKVEuvlAuvlAuvkgxl6VlcnZ2LrWNpAaCE0IIIYRUFOo0EUIIIYQYgDpNHLO1tcXcuXNha2tr7lJMinLxhXLxhXLxRYq5eM5EA8EJIYQQQgxAZ5oIIYQQQgxAnSZCCCGEEANQp4kQQgghxADUaSKEEEIIMQB1mgghhFgsPz8/jBo1ytxlEAKAOk1cuHPnDt577z3Url0bdnZ2cHJyQocOHfDjjz9i1qxZEASh1J+goCBzx9Dxb8yVl5cntlOpVAgLC0NQUBBcXFxga2sLPz8/jB49GhcuXCh3HevWrSvxd3fmzJlyP0dRarUa69atQ9++feHt7Y1KlSqhcePG+Prrr5Gfn1/s465duwZBEGBnZ4fMzEy9bYKCgtC4cWOT1gsACQkJ+OCDD1CvXj04ODjAwcEBr732GiZNmoTLly/rfczMmTMhCAKGDBli8npMhZdcJb1HZ8+e/crqqGiG/l0suk0mk8HLywvdu3dHZGSk1vH8/PwgCAK6du2q9/nWrFkjHscUnyX/tlwloduoWLgDBw5g8ODBsLW1xYgRI9C4cWMUFhbir7/+wowZM9CxY0ds3LhRbJ+Tk4MJEyZgwIABGDhwoLi9WrVq5ii/WP/WXP/88w9Wr16NvLw8DBw4EIcPH0anTp3wySefwMXFBYmJidi+fTvWr1+PpKQkk9y2Z/78+ahVq5bO9jp16pT72EXl5uZi9OjRaNu2Ld5//314eHggJiYGc+fOxYkTJxAeHq73no2bNm1C9erVkZGRgZ07d2LcuHEmras4+/fvx5AhQ2BlZYV33nkHTZs2hUwmw/Xr1/HHH39g1apVSEhIgK+vr/gYxhi2bt0KPz8/7Nu3D9nZ2XB0dHwl9RqKx1z63qMV0Uk2N0P+Lnbr1g0jRowAYwwJCQlYuXIlQkJCcODAAfTq1UtsZ2dnh4iICDx69AjVq1fXOt7mzZthZ2dX4j9WTEmqufQq8Xa+xKzu3r3LKleuzBo0aMAePnyos//WrVvshx9+0NqWmprKALC5c+e+oirLjnIxNmnSJAaAff/99zrtlEolW7x4cal32y5NWFgYA8DOnz9fruMYqqCggJ0+fVpn+7x58xgAduzYMZ19arWa+fn5sWnTprEBAwawoKAgvcfu3Lkza9SokclqvX37NqtUqRJr2LCh3tdKoVCwH3/8kSUlJWltDw8PZwBYeHg4s7a2ZuvWrTNZTabAWy5D3qO+vr5s5MiRr6SeimLo30UAbNKkSVrbLl++zACw7t27i9t8fX1Zly5dmJOTk85nZXJyMpPJZGzQoEEV/vdfqrlKQpfnLNh3332HnJwcrF27Fp6enjr769Spg6lTp5qhsvL5t+e6f/8+fvnlF3Tr1g0ffvihTju5XI6PP/74ld0cWq1W48cff0STJk1gZ2cHd3d39OzZU+v0t1KpxFdffQV/f3/xMuInn3yCgoICsY2NjQ3at2+vc/wBAwYAeHEZ7mWnT59GYmIihg4diqFDh+LUqVO4f/9+BaTU9t133+H58+cICwvT+1pZWVlhypQp8Pb21tq+efNmvPbaawgODkbXrl2xefPmCq+1LKSa62V3797F4MGD4eLiAgcHB7Rt2xYHDhzQahMZGQlBELBt2zZ88sknqF69OipVqoS+ffsiOTlZq+2tW7cwaNAgVK9eHXZ2dqhZsyaGDh2KZ8+evcpYejVp0gRubm5ISEjQ2m5nZ4eBAwdiy5YtWtu3bt2KqlWrokePHq+yzDLjNRddnrNg+/btQ+3atfV+EfHs357r0KFDUCqVePfdd19JXc+ePUNaWprWNkEQ4OrqCgAYO3Ys1q1bh169emHcuHFQKpWIiorCmTNn0LJlSwDAuHHjsH79erz55puYPn06zp49i4ULF+LatWv4888/S3z+R48eAQDc3Nx09m3evBn+/v5o1aoVGjduDAcHB2zduhUzZswwRfRi7d+/H3Xq1EGbNm0MfkxBQQF27dqF6dOnAwDefvttjB49Wu9lBHPhNZe+96i+9wsAPH78GO3bt0dubi6mTJkCV1dXrF+/Hn379sXOnTvFTrrGN998A0EQMGvWLDx58gQ//PADunbtiri4ONjb26OwsBA9evRAQUEBJk+ejOrVq+PBgwfYv38/MjMzDbrzfXlyFv27qE9GRgYyMjL0Xk4fNmwYunfvjjt37sDf3x8AsGXLFrz55puwtrY2Wd2lkWouvcxyfouU6tmzZwwA69evX5keZ+mXsSgXYx999BEDwGJjYyu0Js2pc30/tra2jLH/XZaZMmWKzuPVa
jVjjLG4uDgGgI0bN05r/8cffyxe0ilJ165dmZOTE8vIyNDaXlhYyFxdXdmnn34qbhs2bBhr2rSpzjFMeXlO81r1799fZ19GRgZLTU0Vf3Jzc8V9O3fuZADYrVu3GGOMZWVlMTs7O72XWM2Bx1wlvUc1Xr489+GHHzIALCoqStyWnZ3NatWqxfz8/JhKpWKMMRYREcEAsBo1arCsrCyx7fbt2xkA9uOPPzLGGIuNjWUA2I4dO8ySU/N3kbEXl7HGjh3LUlNT2ZMnT9jZs2dZly5dGAC2dOlSsZ2vry8LDQ1lSqWSVa9enX311VeMMcauXr3KALCTJ0++ksvzUs1VEro8Z6GysrIAwOIGmZYX5Xr1v4P//ve/OHbsmNbPoUOHAAC7du2CIAiYO3euzuM0g7YPHjwIAJg2bZrWfs2ZiZcvixS1YMECHD9+HIsWLUKVKlW09h06dAjp6el4++23xW1vv/024uPj8c8//5Q9qIE0v//KlSvr7AsKCoK7u7v489///lfct3nzZrRs2VL8l7GjoyNCQ0Mt5lIWz7n0vUeLc/DgQbRu3RodO3YUt1WuXBn/+c9/kJiYiKtXr2q1HzFihNbftTfffBOenp7i+1pzJunIkSPIzc01ZSwdJf1d1Fi7di3c3d3h4eGBNm3a4PTp05g2bVqxl/LfeustbN26FcCL19Lb2xuBgYEVmuNlUs2lD12es1BOTk4AgOzsbDNXYlqU69X/Dlq3bi1eZnvZnTt34OXlBRcXl2Iff+/ePchkMp3T6NWrV0eVKlVw7949vY/btm0bPvvsM4wdOxYTJkzQ2b9p0ybUqlULtra2uH37NgDA398fDg4O2Lx5MxYsWGBoxDLRfIHm5OTo7Pvll1+QnZ2Nx48fY/jw4eL2zMxMHDx4EB988IFYKwB06NABu3btws2bN1GvXr0KqddQPOcq6T36snv37um9/NiwYUNxf9GZd3Xr1tVqJwgC6tSpg8TERABArVq1MG3aNCxbtgybN29GYGAg+vbti+HDh5v00hxgWM5+/frhgw8+gCAIcHR0RKNGjVCpUqVi2w8bNgw//fQT4uPjsWXLFgwdOlTvLNWKJNVc+lCnyUI5OTnBy8sLf//9t7lLMSnKBTRo0AAAcOXKFQQEBFRwZaZTlg+sY8eOYcSIEQgNDcXPP/+ssz8rKwv79u1Dfn6+zpca8GL8gmYsiqk5OzvD09NT72ul+TLWfKFq7NixAwUFBVi6dCmWLl2q87jNmzdj3rx5Jq+1LKSa61VYunQpRo0ahT179uDo0aOYMmUKFi5ciDNnzryyCRkaNWvWLHadIn3atGkDf39/fPjhh0hISMCwYcMqsDrjSSUXXZ6zYH369MGdO3cQExNj7lJM6t+eq1evXpDL5di0adMrqqx4/v7+ePjwIZ4+fVpsG19fX6jVaty6dUtr++PHj5GZmam13g8AnD17FgMGDEDLli2xfft2WFnp/tvsjz/+QH5+PlatWoUdO3Zo/Xz99de4d+8eTp8+bZqQeoSGhuL27ds4d+6cQe03b96Mxo0b69S6Y8cOdO3aVWemj7lINVdRvr6+uHHjhs7269evi/uLevl9yxjD7du34efnp7W9SZMm+Oyzz3Dq1ClERUXhwYMHejv8lujtt99GZGQkGjZsyNU/xEpjibmo02TBZs6ciUqVKmHcuHF4/Pixzv47d+7gxx9/NENl5fNvz+Xt7Y3x48fj6NGjWL58uU47tVqNpUuXvpKp94MGDQJjTO/ZhBfjN4HevXsDAH744Qet/cuWLQPw4ota49q1awgNDYWfnx/2798Pe3t7vc+7adMm1K5dG++//z7efPNNrZ+PP/4YlStXrtAxNTNnzoSDgwPGjBmj97XSZAeA5ORknDp1Cm+99ZZOrW+++SZGjx6N27dv4+zZsxVWr6Gkmquo3r1749y5c1r/OHn+/DlWr14NPz8/vPbaa1rtN2zYoHUpfOfOnUhJSREXVMzKyoJSqdR6TJMmTSCTybSW1LBk48aNw9y5c/WeLeSZJeaiy3MWzN/fH1u2bMGQIUPQsGFDrRWmo6OjsWPHDi7vyUS5XlwOuHPnDqZMmYI//vgDffr0QdWqVZGUlIQdO3bg+vXrGDp0qEnqOnTokPiv8KLat2+P4OBgvPvuu/jpp59w69Yt9OzZE2q1GlFRUQgODsYHH3yApk2bYuTIkVi9ejUyMzPRuXNnnDt3DuvXr0f//v0RHBwM4MUYrR49eiAjIwMzZszQGSDu7++Pdu3a4eHDh4iIiMCUKVP01mtra4sePXpgx44d+OmnnypkinHdunWxZcsWvP3226hfv764cjb7/9WKt2zZAplMhpo1a2LLli1gjKFv3756j9W7d29YWVlh8+bNZZrqXxGkmquo2bNnY+vWrejVqxemTJkCFxcXrF+/HgkJCdi1axdkMu1zAS4uLujYsSNGjx6Nx48f44cffkCdOnUwfvx4AEB4eDg++OADDB48GPXq1YNSqcTGjRshl8sxaNAgk9Ze0t/F2rVrG31cX19ffPnll+WorHykmksvs8zZI2Vy8+ZNNn78eObn58dsbGyYo6Mj69ChA1u+fDnLz8/XamvpU/OL+rfnUiqV7Ndff2WBgYHM2dmZWVtbM19fXzZ69GiTLEdQ0nRgACwsLEysY/HixaxBgwbMxsaGubu7s169erGLFy+Kx1IoFGzevHmsVq1azNramnl7e7M5c+Zo5UlISCjx+TTTxpcuXcoAsBMnThRb+7p16xgAtmfPHsaY6VcE17h9+zabMGECq1OnDrOzs2P29vasQYMG7P3332dxcXGMMcaaNGnCfHx8SjxOUFAQ8/DwYAqFwuQ1GoOXXMauCH7nzh325ptvsipVqjA7OzvWunVrtn//fq02miUHtm7dyubMmcM8PDyYvb09Cw0NZffu3RPb3b17l40ZM4b5+/szOzs75uLiwoKDg9nx48dNnrO0v4vQs3K2Ppqp+YY8p7mWHOA5V0kExoqcryWEEEIkIDIyEsHBwdixYwfefPNNc5dDJILGNBFCCCGEGIA6TYQQQgghBqBOEyGEEEKIAWhMEyGEEEKIAehMEyGEEEKIAajT9C8WGRkJQRAQGRlp7lIIMasvv/zS6Fu2bNy4EQ0aNIC1tbXOTYkJMUR53n/k1aJOEyGEGOn69esYNWoU/P39sWbNGqxevdrcJZXZypUrsW7dOnOXQSQiOjoaX375JTIzM416vKW/H2lFcEIIMVJkZCTUajV+/PFH1KlTx9zlGGXlypVwc3PjchV+Ynmio6Mxb948jBo1yqgzr5b+fqQzTRxSq9XIz883dxmE/Os9efIEAOiyHCH/EtRpMiPNdezr16/jrbfegpOTE1xdXTF16lStTpEgCPjggw+wefNmNGrUCLa2tjh8+DAA4MGDBxgzZgyqVasGW1tbNGrUCL/99pvOc92/fx/9+/dHpUqV4OHhgY8++shkN6PU5Lh58yaGDx8OZ2dnuLu74/PPPwdjDMnJyejXrx+cnJxQvXp1nZsvFhQUYO7cuahTpw5sbW3h7e2NmTNn6tQX
FhaGkJAQeHh4wNbWFq+99hpWrVqlU8+FCxfQo0cPuLm5wd7eHrVq1cKYMWNMkpXw76+//kKrVq1gZ2cHf39//PLLL3rbbdq0CS1atIC9vT1cXFwwdOhQJCcni/v9/Pwwd+5cAIC7uzsEQajw+2Tdu3cPEydORP369WFvbw9XV1cMHjwYiYmJWu2KGyOzbt06CIIgtvfz88M///yDkydPQhAECIKAoKAgsf3du3cxePBguLi4wMHBAW3bttW5pyApm9Lef507d0bTpk31PrZ+/fro0aMHACAxMRGCIGDJkiVYvXo1/P39YWtri1atWuH8+fMVnkOfL7/8EjNmzAAA1KpVS3xPJSYmQqlU4quvvhLr9PPzwyeffKL1OV/a+9ES0OU5C/DWW2/Bz88PCxcuxJkzZ/DTTz8hIyMDGzZsENuEh4dj+/bt+OCDD+Dm5gY/Pz88fvwYbdu2FTtV7u7uOHToEMaOHYusrCx8+OGHAIC8vDx06dIFSUlJmDJlCry8vLBx40aEh4ebNIfmRrWLFi3CgQMH/q+9O4+K4kr/Bv5tmqWhQZpVMGKDiBtuEdcRaUGOjCiLQpAwEhSVGA1qojFGnR8yo3Hfhhlx9IyIGreIG47LjIo77sYNUBQbMy6Isoi4ITzvH7xdoewGGyVC4vM5h3PoW7duPbf6VvXtqlu3MXPmTFhbW+Of//wnfHx8MHfuXPzwww+YNGkSunbtCi8vL1RUVCAwMBDHjh1DTEwM2rRpg8uXL2Px4sW4fv06tm/fLpSfmJgId3d3BAYGwtDQEKmpqRgzZgwqKiowduxYAJXf/Pv16wc7OztMmTIFCoUCarUaW7durdO6st+my5cvC+1jxowZePXqFeLi4tC4cWNRvlmzZuHPf/4zwsLCMHLkSOTn5yMhIQFeXl64cOECFAoFlixZgjVr1mDbtm1ITEyEubk5OnTo8KvGf+bMGZw4cQLh4eFo2rQp1Go1EhMT0adPH2RkZMDMzKxW5S1ZsgSxsbEwNzfHtGnTAEDYF3l5efjDH/6Ap0+fYty4cbCxsUFycjICAwOxZcsWDBo0qM7r93unT/uLjIzEqFGjcOXKFbRr105IP3PmDK5fv47p06eLyly/fj1KSkrw+eefQyKRYN68eRg8eDBycnJ+lR+7rsngwYNx/fp1bNiwAYsXL4atrS2Ayi8VI0eORHJyMkJDQzFx4kScOnUKs2fPRmZmJrZt2wag5vbYYNTLL94xIiKKi4sjABQYGChKHzNmDAGgixcvElHljx0aGBjQ1atXRflGjBhBjo6O9PDhQ1F6eHg4WVpa0tOnT4mIaMmSJQSANm/eLOQpLS2lFi1aEABKS0urk3rExMQIaa9evaKmTZuSRCKhOXPmCOmFhYVkamoq/ADn2rVrycDAgI4ePSoqc/ny5QSAjh8/LqRp6lOVn58fNW/eXHi9bdu2ev0xR9awBQcHk0wmE/1ga0ZGBkmlUtKcDtVqNUmlUpo1a5Zo3cuXL5OhoaEoXdP28/Pz30v8uo6B9PR0AkBr1qzRiut1mh87vXXrlpDm7u5OKpVKK++ECRMIgOjYLCkpIRcXF3J2dqby8vJ3q8wHSJ/2V1RURDKZjL799lvRuuPGjSO5XE5Pnjwhol9+INvGxoYKCgqEfDt27CAAlJqa+h5qpG3+/Plabeynn34iADRy5EhR3kmTJhEAOnjwoJBWXXtsKPj2XAOguUqiERsbCwDYvXu3kKZSqdC2bVvhNREhJSUFAQEBICI8fPhQ+PPz80NxcTHOnz8vlOPo6Cj60UozMzPExMTUaT1Gjhwp/C+VStGlSxcQEUaMGCGkKxQKtGrVCjk5OQCAH3/8EW3atEHr1q1FdfDx8QEApKWlCeuampoK/xcXF+Phw4dQqVTIyclBcXGxUD4A7Nq1C2VlZXVaP/bbVl5ejn379iE4OBjNmjUT0tu0aSPc8gCArVu3oqKiAmFhYaI26eDgADc3N1GbfN+qHgNlZWV49OgRWrRoAYVCIRzvdWX37t3o1q0bPD09hTRzc3PExMRArVYjIyOjTrf3e6dv+7O0tERQUBA2bNgA+v9zT5eXl2PTpk3CEIuqhgwZAisrK+F17969AUA4xzYEms+yr7/+WpQ+ceJEAPhN3fLlTlMD4ObmJnrt6uoKAwMD0TgFFxcXUZ78/HwUFRVhxYoVsLOzE/0NHz4cwC+DVHNzc9GiRQutMQ6tWrWq03pUPREAlQe/TCYTLtFWTS8sLAQAZGdn4+rVq1p1aNmypagOAHD8+HH4+vpCLpdDoVDAzs4OU6dOBQCh06RSqRASEoL4+HjY2toiKCgISUlJdTZ+i/125efn49mzZ1rHGyA+FrKzs0FEcHNz02qXmZmZojb5vj179gz/93//BycnJ5iYmMDW1hZ2dnYoKioSjoG6kpubq/Mc0aZNG2E505++7Q8APvvsM9y+fRtHjx4FAOzfvx95eXmIjIzUWvf1866mA6U5xzYEubm5MDAw0HrC1MHBAQqF4jfVlnhMUwOkawBn1W+YQOUTdAAwdOhQREVF6Szn1x5f8TqpVKpXGgDhG1RFRQXat2+PRYsW6czn5OQEALh58yb69u2L1q1bY9GiRXBycoKxsTF2796NxYsXC/tDIpFgy5YtOHnyJFJTU7Fv3z5ER0dj4cKFOHnyJMzNzeuiqux3rKKiAhKJBHv27NHZfuuzDcXGxiIpKQkTJkxAz549YWlpCYlEgvDwcOEYAHSfQ4DKKxas4fPz80Pjxo2xbt06eHl5Yd26dXBwcICvr69W3jedYxuS38MEntxpagCys7NFV5Ju3LiBiooKODs7V7uOnZ0dLCwsUF5ervNAqkqpVOLKlSsgIlGjvXbt2jvH/q5cXV1x8eJF9O3bt8YDKjU1FS9evMDOnTtF36yqu1XSo0cP9OjRA7NmzcL69evxpz/9CRs3bhTdQmQfFjs7O5iamiI7O1trWdVjwdXVFUQEFxcX4YpnQ7FlyxZERUWJnkB9/vy51kSCmqsNRUVFoukQdH2jr+64UyqVOs8RWVlZwnKmP33bH1DZEYqIiMDq1asxd+5cbN++HaNGjaq2g9SQ6GpPSqUSFRUVyM7OFq5UApUPGxQVFYnaUkPvWPHtuQbgH//4h+h1QkICAKB///7VriOVShESEoKUlBRcuXJFa3l+fr7wv7+/P+7evYstW7YIaU+fPm0QsxeHhYXhzp07WLlypdayZ8+eobS0FMAv36aqfnsqLi5GUlKSaJ3CwkKtb1idOnUCAL5F94GTSqXw8/PD9u3bcfv2bSE9MzMT+/btE14PHjwYUqkU8fHxWm2JiPDo0aP3FvPrpFKpVkwJCQlaV5BcXV0BAEeOHBHSSktLkZycrFWmXC7XOXuzv78/Tp8+jfT0dFEZK1asgLOzs2iMJXszfdufRmRkJAoLC/H555/jyZMnGDp06PsM961pxlxVbVP+/v4AKp+Oq0pzh2HAgAGi9d92NvH3ga80NQC3bt1CYGAg/vjHPyI9PR3r1q1DREREtXN1aMyZMwdpaWno3r07Ro0ahbZt26KgoAD
nz5/H/v37UVBQAAAYNWoU/v73v+Ozzz7DuXPn4OjoiLVr19b68eRfQ2RkJDZv3ozRo0cjLS0NvXr1Qnl5ObKysrB582bs27cPXbp0Qb9+/WBsbIyAgADhJLJy5UrY29vj3r17QnnJyclYtmwZBg0aBFdXV5SUlGDlypVo1KiRcOCyD1d8fDz27t2L3r17Y8yYMXj16hUSEhLg7u6OS5cuAajscMycORPfffcd1Go1goODYWFhgVu3bmHbtm2IiYnBpEmT6iX+gQMHYu3atbC0tETbtm2Rnp6O/fv3w8bGRpSvX79+aNasGUaMGIFvvvkGUqkUq1atgp2dnegDGwA8PDyQmJiImTNnokWLFrC3t4ePjw+mTJmCDRs2oH///hg3bhysra2RnJyMW7duISUlBQYG/J27tvRpfxoff/wx2rVrJzws07lz53qKunY8PDwAANOmTUN4eDiMjIwQEBCAqKgorFixAkVFRVCpVDh9+jSSk5MRHBwMb29v0fq62mODUR+P7LFKmseCMzIyKDQ0lCwsLMjKyoq+/PJLevbsmZAPAI0dO1ZnGXl5eTR27FhycnIiIyMjcnBwoL59+9KKFStE+XJzcykwMJDMzMzI1taWxo8fT3v37q3TKQdef+w6KiqK5HK5Vn6VSkXu7u7C65cvX9LcuXPJ3d2dTExMyMrKijw8PCg+Pp6Ki4uFfDt37qQOHTqQTCYjZ2dnmjt3Lq1atUr0eOv58+fp008/pWbNmpGJiQnZ29vTwIED6ezZs+9UR/b7cfjwYfLw8CBjY2Nq3rw5LV++XOcj+ikpKeTp6UlyuZzkcjm1bt2axo4dS9euXRPyvO8pBwoLC2n48OFka2tL5ubm5OfnR1lZWaRUKoVpPDTOnTtH3bt3J2NjY2rWrBktWrRI55QD9+/fpwEDBpCFhQUBED3uffPmTQoNDSWFQkEymYy6detGu3btei91/b3St/0REc2bN48A0Pfff6+1TDPlwPz587WWAaC4uLhfI3y9/PWvf6WPPvqIDAwMhPZWVlZG8fHx5OLiQkZGRuTk5ETfffcdPX/+XLRuTe2xIZAQNcDRYh+IGTNmID4+Hvn5+VpPmDHGGPuwLV26FF999RXUarXWU3KsfvD1VcYYY6yBISL861//gkql4g5TA8JjmhhjjLEGorS0FDt37kRaWhouX76MHTt21HdIrAruNDHGGGMNRH5+PiIiIqBQKDB16lQEBgbWd0isCh7TxBhjjDGmBx7TxBhjjDGmB+40McYYY4zpgTtNjDHGGGN64E4TY4wxxpgeuNPEGGOMMaYH7jQx1oCtXr0aEokEq1evru9Q9DJjxgxIJBIcOnSovkNp0NRqNSQSCYYNG1Yv2+/Tp0+D/zX5962+3xP228CdJsbq0W/tRH3o0CFIJBLMmDGjvkNp8LhjUnvOzs5wdnau7zAYqxZPbslYAzZo0CD06NEDjo6O9R2KXr788kuEh4fzzz4wxn6XuNPEWANmaWkJS0vL+g5Db7a2tvzj04yx3y2+PcdYLRw5cgQBAQGwtbWFiYkJ3NzcMH36dDx9+lQrb0pKClQqFezt7SGTydCkSRP4+voiJSUFQOV4JRcXFwBAcnIyJBKJ8KcZE1TdmCaJRII+ffrgzp07iIiIgK2tLSwsLDBgwADk5OQAADIzMxEcHAxra2tYWFggNDQUeXl5WnGuWrUKQUFBcHZ2hkwmg7W1Nfz8/JCWlibKN2PGDHh7ewMA4uPjRfGq1WohT3VjmlJTU+Ht7Q1LS0uYmpqiY8eOWLRoEV69eiXKV/WW5Y0bNzBo0CBYWVlBLpfD19cXFy9erPlNqmLYsGGQSCTIycnBggUL0LJlS5iamqJt27bYuHEjAODly5eYNm2aUP8OHTpgz549OssrKSlBXFwc3N3dYWpqCoVCAT8/Pxw7dkyUTyKR4PDhw8L/mj9dt2FrU8crV64gLCwM9vb2MDExgYuLCyZMmIBHjx7pzH/s2DGoVCrI5XLY2NhgyJAh+Pnnn/XdfSJJSUno3r07zM3NYW5uju7du+sca1f1Fu6JEyfQr18/KBSKGm9Vat7z3Nxc5ObmivbZ67eC9Y3jbfMzVhO+0sSYnhITEzF27FgoFAoEBATA3t4eZ8+exaxZs5CWloa0tDQYGxsLeceMGQNHR0cMGjQINjY2uH//Pk6fPo1t27YhJCQEnTp1wvjx47F06VJ07NgRwcHBwrb0GddRWFgIT09PODg4ICoqCtevX8euXbuQlZWFHTt2oHfv3vDw8EB0dDTOnTuHlJQUFBQU4ODBg6Jyxo4di44dO8LX1xd2dna4c+cOtm/fDl9fX2zduhVBQUEAKsfoqNVqJCcnQ6VSoU+fPkIZCoWixlgXLVqEiRMnwtraGhEREZDL5di5cycmTpyIo0ePYuvWrVofqmq1Gj169IC7uzuio6Nx8+ZN7NixA97e3sjMzETjxo3fuI80vv76a5w6dQoBAQGQSqXYuHEjIiIiYGVlhYSEBGRkZGDAgAF4/vw51q9fj6CgIGRmZsLV1VUoo6CgAF5eXrh69Sp69eqF0aNH4/Hjx0JMP/74o/AexsXFYfXq1cjNzUVcXJxQRqdOnd66jseOHYOfnx9evnyJ0NBQODs7Iz09HUuXLsWuXbtw8uRJ0VW+AwcOoH///jAwMMCQIUPQpEkTHDhwAL169YKVlZXe+w4Axo0bh4SEBHz00UcYMWIEgMovBcOHD8eFCxewdOlSrXVOnDiB77//Ht7e3oiJicHt27erLV+hUCAuLg5LliwBAEyYMEFYVrWd1TaOt4mbsRoRY+yNrl69SoaGhtSxY0d6+PChaNns2bMJAC1YsEBI69y5MxkbG1NeXp5WWVXXv3XrFgGgqKgondtNSkoiAJSUlCRKB0AA6KuvvhKlf/HFFwSAFAoFLVmyREivqKggf39/AkDnzp0TrZOTk6O13bt371KTJk3Izc1NlJ6WlkYAKC4uTme8cXFxBIDS0tKEtBs3bpChoSHZ29vT7du3hfTnz5+Tp6cnAaA1a9YI6Zp9AoDmzJkjKn/69OkEgGbPnq1z+6+LiooiANSyZUt68OCBkH7q1ClhP3l6etKTJ0+EZZs2bSIAFBsbKyorIiKCANDKlStF6Xl5eeTk5ER2dnb07NkzIV2lUlF1p9ja1rG8vJxcXV0JAO3du1eU/5tvviEAFB0dLcrfvHlzkkgkdPToUSG9oqJCqIe+p//Dhw8TAGrTpg0VFRUJ6QUFBdSyZUsCQEeOHBHSNW0EAK1atUqvbWgolUpSKpV1Ekdt87/pWGSMiIg7TYzpYdy4cVonWY3y8nKys7MjDw8PIa1z584kl8upoKCgxnLfpdNkbm5OpaWlovQjR44QAHJ1daWKigrRsjVr1tTqgyw2NpYAkFqtFtLeptP0l7/8hQDQ3LlztfIfP36cAJCPj4+QptknLi4uVF5eLsqvWTZ48GC96qDpNCUnJ2sta968OQGgw4cPi9JfvXpFRkZG5OXlJaTl5+eTVCoVxVnV3/72NwJAqampQpo+nSZ966h5X/v3769VVklJCV
lbW5NMJqMXL14Q0S8dhoCAAK38arWapFKp3p2m6OhoAkCbNm3SWvbDDz9oddg0baRz5856lV9VTZ2m2sZR2/zcaWL64NtzjOnh5MmTAIB9+/bhwIEDWsuNjIyQlZUlvA4PD8fkyZPRrl07REREwNvbG56enmjUqFGdxeTm5gYzMzNRmuYpuw4dOmjd7tIsu3v3rig9JycHs2fPxsGDB3Hnzh28ePFCtPzu3btQKpVvHeeFCxcAiG+zaPTs2RMymQw//fST1rJOnTrBwEA87LJp06YAgKKiolrF8PptMaByf+Tk5Ggtk0qlsLe3F+2nM2fOoLy8HC9evNA53UJ2djYAICsrCwMHDqxVXPrUsaZ9aG5uji5duuA///kPrl27hvbt2wtjonr37q2VX6lUwsnJSRiH9iY1bVszxk3X+9e1a1e9ytdXbeN427gZqwl3mhjTQ0FBAQBg1qxZeuWfNGkSbGxskJiYiIULF2LBggUwNDTEgAEDsHjxYmEA+LvQ1QEzNDR847KysjIh7caNG+jWrRseP34Mb29vBAQEoFGjRjAwMMChQ4dw+PBhrU5UbT1+/BgAdI5BkkgkaNy4Me7cuaO1rKY6lJeX1yqGt9lXVfeT5v0/fvw4jh8/Xu12SktL6yyuqnWsaR8Cv3SINfmKi4sBAPb29jrzN27cWO9O0+PHj2FgYAA7Ozud5UgkEmG7ry+rS7WN423jZqwm3GliTA+aD7fHjx/DwsLijfklEgmio6MRHR2NR48e4ejRo9iwYQM2b96M7OxsXLp0CVKp9NcO+40WL16MwsJCrF27FkOHDhUtGz16tPAE2LvQ7Lu8vDytK1ZEhLy8vDq9Avdr0MQ3ceJELFiwoN62r+vpRwC4f/++KJ9mmooHDx7ozF9dOdVtu6KiAvn5+VqdsAcPHoCIdL5/dT2xZ23jeNu4GasJTznAmB66d+8O4JfbdLVhY2OD4OBgbNq0CT4+PsjIyMCNGzcAQOg41fbKSV25efMmAAhPyGkQkc4rKm8T78cffwwAOqchOHXqFJ4/f67z9llD0rVrV0gkEqSnp+u9Tl2+tzXtw9LSUpw9exampqZo1aoVAKBjx44AgKNHj2rlz83NrdW0AzVtW5NWV++fVCqtdn/VNo73GTf7cHCniTE9jBkzBoaGhoiNjdX56HRRUZEwhgKoPCkTkShPWVmZcJtHJpMBAKysrCCRSN567px3pbny8/o8Q3PmzMGVK1e08ltbWwNAreKNiIiAoaEhFi1aJBon9PLlS3z77bcA0OB/RsbBwQFhYWE4ceIE5s+fr/XeApUdwKrzdb3NvqpOr1694Orqij179mD//v2iZTNnzsSjR4/w6aefClNeeHp6wsXFBbt27RK9t0SEqVOn1qojFxUVBaBybq6qt7OKi4sRHx8vyvOurK2t8fDhQzx//vyd43ifcbMPB9+eY0wP7dq1w7Jly/DFF1+gVatW8Pf3h6urK0pKSpCTk4PDhw9j2LBhWL58OQAgODgYjRo1Qo8ePaBUKlFWVob//ve/yMjIQGhoqNBZMTc3R9euXXHkyBFERkbCzc0NBgYGiIyMfKfB1/oaPXo0kpKSEBISgrCwMNjY2ODkyZM4f/48BgwYgH//+9+i/K1bt0aTJk2wceNGmJiYoGnTppBIJIiNja125nJXV1fMnTsXEydORIcOHRAWFga5XI7U1FRcu3YNQUFBWrcGG6Jly5bh2rVrmDx5MtauXYuePXtCoVDg559/xtmzZ5GdnY179+4Jg/N9fHywZcsWhISEoH///pDJZOjYsSMCAgJqvW0DAwOsXr0afn5+8Pf3xyeffAKlUon09HQcOnQIrq6umDNnjij/ihUr4O/vD19fX2GepoMHD+LevXvo0KEDLl26pNe2vby8EBsbi4SEBLRr1w4hISEgIqSkpOB///sfxo0bBy8vr1rXSRcfHx+cPXsW/fv3R+/evWFsbAwvLy/hrzZxvM+42Qeknp7aY+w36fTp0xQeHk5NmjQhIyMjsrW1pc6dO9OUKVMoMzNTyLds2TIKDAwkpVJJMpmMbGxsqFu3bpSYmEgvX74UlXnt2jXy9/cnhUJBEolE9Mh+TVMOqFQqrfhqemy6uukC0tLSqFevXmRhYUEKhYL8/f3p3LlzOqcPICI6efIkqVQqsrCwEObjuXXrFhHpnnJAY8eOHcJ6JiYm1L59e1q4cCGVlZXpXYea6q6LZsoBTXxV1TQlQHWPvj99+pTmzZtHHh4eJJfLydTUlFxcXCg4OJjWrFkjqktZWRlNnjyZmjVrRoaGhqI6vW0dL126RKGhoWRra0tGRkakVCpp/PjxlJ+fr7OcI0eOkJeXF5mampK1tTV98sknlJubW2Pdq7Nq1Srq2rUrmZmZkZmZGXXt2lXn9BVvmpaiJiUlJTRq1ChydHQUpkV4vRx946htfp5ygOlDQqTjOjNjjDHGGBPhMU2MMcYYY3rgThNjjDHGmB6408QYY4wxpgfuNDHGGGOM6YE7TYwxxhhjeuBOE2OMMcaYHrjTxBhjjDGmB+40McYYY4zpgTtNjDHGGGN64E4TY4wxxpgeuNPEGGOMMaYH7jQxxhhjjOnh/wH75hqFdYBzDwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "exp='SQUAD-v1-1'\n", + "file_name = 'res-sophIA-squad3.json'\n", + "name = 'SQUAD-v1-1'\n", + "plot_all_error_bar(folder, file_name, name, exp, meas_calc_list, to_save=do_save)\n", + "# plot_evolution(folder, file_name, name, exp, meas_calc_list, to_save=do_save)\n", + "\n", + "new_file_name = 'res-sophIA-squad3_dyn-EPM.json'\n", + "total_to_dynamic_EPM(exp,folder, file_name, new_file_name, meas_calc_list, idle_power)\n", + "plot_all_error_bar(folder, new_file_name, name+'_dyn-EPM', exp, meas_calc_list_2, to_save=do_save)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "c23f1f2f", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAk0AAAHPCAYAAABOau4WAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAACkgUlEQVR4nOzdd1gUV9sG8Ht2WXpRBASVZjc2jL0gxd5bLFGjJpY3sSYmxmiK0SRqYoxJNJr6WmKJLbF3gUgUWyJq7AUEFQWk9y3n+8Nv52XZXViGBXZ2nt91eV165uzsczsLHGbOnOEYYwyEEEIIIaRUsuougBBCCCFEDGjQRAghhBBiAho0EUIIIYSYgAZNhBBCCCEmoEETIYQQQogJaNBECCGEEGICGjQRQgghhJiABk2EEEIIISagQRMhhBBCiAlo0ERIBXEcV+4/oaGhlVLLxx9/DI7j8PHHH5tlf/Hx8eA4DgEBAWbZnxRMmjQJHMdh0qRJ1V2KxdB+jjiOg4ODAx4+fGi0r42NDTiOQ3x8fNUVSIiJbKq7AELEbuLEiXptT548wdGjR41ub9q0aaXXRYglKigowEcffYT//ve/VfJ+HMcBAOiJYcQcaNBESAVt2LBBry0qKoofNBnaXllmzpyJMWPGwMPDwyz7q1u3Lm7cuAGFQmGW/RFp4zgOdnZ22LRpE95++200b968uksipFzo8hwhVsTDwwNNmzY126BJoVCgadOmaNCggVn2R6RNJpNh1qxZUKvVWLhwYXWXQ0i50aCJkCpWfN5RQkICJk+eDF9fXygUCp15ML///jumTJmCFi1aoGbNmrC3t0dgYCBee+013Lp1q8x9F7dhwwZ+nk1ubi4WLFiAhg0bws7ODt7e3pg4cSIePXqkt7/S5jRp56gAwO7du9GtWze4urrCyckJXbt2xaFDh4z+Hzx48ACTJk2Ct7c37O3t0ahRIyxatAgFBQUIDQ0Fx3GIiooq8/+yOJVKhe+//x5dunSBm5sbv9/Zs2cbzFbRDOUVFRXFz2crLCzE4sWL0bhxY9jb28PPzw/z589HQUEBACAzMxPvvPMO6tevD3t7ewQEBODjjz+GSqXS229KSgq+/fZb9O/fH4GBgXBwcICrqyvatWuHzz//nN+nIf/++y9GjBgBDw8PODo6omXLlvj666+h0WgQEBBgdG6RSqXCzz//jNDQULi7u8POzg6BgYF44403kJiYWOr/w4IFC1CzZk3s27cPp0+fLt9/IoBdu3ahb9++8PT0hK2tLerWrYvx48fj+vXrOv20XwtaJecV0pwpIggjhJhdZGQkA8AMfYktWrSIAWBjx45l7u7uzNvbm40YMYINHz6cvf3223w/uVzOHB0dWbt27djw4cPZ4MGDWf369RkA5uTkxE6fPm1034sWLdJpX79+PQPAhg4dylq1asVq1KjBBg0axIYMGcK8vLwYAObv788yMjJ0XhcXF8dvK0mb76OPPmIcx7GuXbuy0aNHs9atWzMAjOM49vvvv+u97tq1a8zDw4MBYHXq1GGjRo1iAwYMYE5OTqxbt26sS5cuDACLjIw07T+bMVZQUMB69uzJADB7e3vWr18/Nnr0aObr68sAMA8PD/b333+bLUNpJk6cyACwiRMn6rRrPxOdO3dmISEhzNXVlQ0ePJgNHDiQubm5MQBs4MCB7NmzZ6xJkybM09OTjRgxgvXu3ZvZ29szAOz111/Xe79ff/2VAWB169ZlISEhbMyYMaxHjx7M2dmZf7+CggK910VFRTEHBwcGgDVo0ICNGTOG9erVi9na2rLRo0czf39/BoDFxcXpvC4rK4uFhoYyAMzZ2ZmFhISwl156iTVp0oQBYLVq1WL//POPzmu0nyO5XM4YY+zzzz9nAFjXrl316pLL5QbfV6lUslGjRjEAzM7OjnXp0oWNHDmSP1YODg7s8OHDfP8//viDPxba41H8T0pKSmmHkRCDaNBESCUwZdAEgI0fP97gDzTGGPvtt99YTk6OTptGo2HfffcdA8CaN2/ONBqNwX0bGzQBYH369GGZmZn8trS0NBYUFMQAsKVLl+q8zpRBU40aNdjZs2cN1tG4cWO917344osMABszZoxO9ocPH/I/eMs7aJo/fz7/w7/4D9uioiI2efJkBoAFBgaywsJCs2QoTVmDJgCsQ4cOLDU1ld8WHx/PatasyQCwli1bskGDBrHc3Fx++4ULF5iNjQ2TyWTswYMHOvu9fv06i4mJ0asjLS2N9e7dmwFgX3zxhc62vLw8VrduXQaAvf3220ytVvPbrl27xmrXrs3XWnLwMnbsWH6A9/TpU51tq1atYgBYo0aNmEql4ttLDpry8vJYvXr1GAC2d+9enX0YGzQtXLiQAWAdO3Zk9+/f19m2c+dOJpfLWc2aNVl6errONmNfh4QIQZ8kQiqBKYMmd3d3vTM7purcuTMDwK5du2Zw38YGTU5OTuzx48d6+/vtt98YABYeHq7Tbsqg6dtvv9XbVlBQwJ89SUhI4NtPnTrFn6F49uyZ3usOHDhQ7kFTfn4+f1Zl3759ettzc3P5QcCWLVsqnKEsZQ2aOI5jV69e1Xvd7Nmz+f+bkoMRxhgbNGgQA8A2btxoci23bt1iAFj79u112jdt2sQf16KiIr3XrVmzxuCg6fr164zjOFanTh2WlZVl8D379+/PALD9+/fzbSUHTYwx9vPPP/OD/+IDLEODpmfPnjEHBwdmb2/PHj58aPB9p0+fzgCw1atX67TToImYE81pIqSa9OzZE25ubqX2uXv3LtasWYM33
+    "text/plain": [
+     "[Matplotlib figure text representation stripped during extraction]"
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAk0AAAHPCAYAAABOau4WAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAACm0UlEQVR4nOzdd3wURf8H8M/epXdCEhIgjRYQkCC9pAICoYMUEQGlPEpVFBAsFBVQpCgI1h9FijSlIwhJJIbQlADSS0ISCCSB9H638/uD5/bJcZdwuVzIzfp9v155KbNze/NhL3vD7syswBhjIIQQQgghFVLUdAMIIYQQQnhAnSZCCCGEEANQp4kQQgghxADUaSKEEEIIMQB1mgghhBBCDECdJkIIIYQQA1CniRBCCCHEANRpIoQQQggxAHWaCCGEEEIMQJ0mQqpIEIRK/4SGhlZLW+bPnw9BEDB//nyT7C8xMRGCIMDPz88k+/s3GDt2LARBwNixY2u6KWZD8zkSBAG2trZISUkpt66FhQUEQUBiYuKzayAhBrKo6QYQwrsxY8bolN2/fx+HDx8ud3vTpk2rvV2EmKOioiJ89NFH+L//+79n8n6CIAAA6IlhxBSo00RIFa1fv16nLDo6Wuo06dteXaZMmYIRI0bAzc3NJPurV68erly5AktLS5Psj/y7CYIAa2trbNy4Ee+88w6aN29e000ipFLo9hwhMuLm5oamTZuarNNkaWmJpk2bomHDhibZH/l3UygUmDp1KtRqNebOnVvTzSGk0qjTRMgzVnbcUVJSEsaNGwdvb29YWlpqjYP55ZdfMH78eLRo0QK1atWCjY0N/P398frrr+PatWtP3XdZ69evl8bZ5OfnY86cOWjUqBGsra3h6emJMWPG4O7duzr7q2hMk2aMCgDs2rULXbt2hZOTE+zt7dGlSxccPHiw3L+DO3fuYOzYsfD09ISNjQ0aN26MefPmoaioCKGhoRAEAdHR0U/9uyxLpVLhm2++QefOneHs7Cztd9q0aXqzVTVDZUVHR0vj2YqLi7FgwQI0adIENjY28PHxwezZs1FUVAQAyM7OxrvvvosGDRrAxsYGfn5+mD9/PlQqlc5+09PT8dVXXyEiIgL+/v6wtbWFk5MT2rZti88++0zapz7//PMPhgwZAjc3N9jZ2aFly5ZYuXIlRFGEn59fuWOLVCoVfvjhB4SGhsLV1RXW1tbw9/fHm2++ieTk5Ar/HubMmYNatWph7969iI2NrdxfIoCdO3eiV69ecHd3h5WVFerVq4dRo0bh8uXLWvU0vwsaT44rpDFTxCiMEGJyUVFRDADT9ys2b948BoCNHDmSubq6Mk9PTzZkyBA2ePBg9s4770j1lEols7OzY23btmWDBw9m/fv3Zw0aNGAAmL29PYuNjS133/PmzdMqX7duHQPABg4cyJ5//nnm4uLC+vXrxwYMGMA8PDwYAObr68uysrK0XpeQkCBte5Im30cffcQEQWBdunRhw4cPZ61atWIAmCAI7JdfftF53aVLl5ibmxsDwOrWrcuGDRvG+vTpw+zt7VnXrl1Z586dGQAWFRVl2F82Y6yoqIh1796dAWA2Njasd+/ebPjw4czb25sBYG5ubuyvv/4yWYaKjBkzhgFgY8aM0SrXfCY6derEQkJCmJOTE+vfvz/r27cvc3Z2ZgBY37592cOHD1lAQABzd3dnQ4YMYS+++CKzsbFhANgbb7yh834//fQTA8Dq1avHQkJC2IgRI1i3bt2Yg4OD9H5FRUU6r4uOjma2trYMAGvYsCEbMWIE69GjB7OysmLDhw9nvr6+DABLSEjQel1OTg4LDQ1lAJiDgwMLCQlhL730EgsICGAAWO3atdnff/+t9RrN50ipVDLGGPvss88YANalSxeddimVSr3vW1payoYNG8YAMGtra9a5c2c2dOhQ6VjZ2tqyQ4cOSfV//fVX6VhojkfZn/T09IoOIyF6UaeJkGpgSKcJABs1apTeLzTGGPv5559ZXl6eVpkoiuzrr79mAFjz5s2ZKIp6911epwkA69mzJ8vOzpa2PXr0iAUGBjIAbNGiRVqvM6TT5OLiwk6ePKm3HU2aNNF53QsvvMAAsBEjRmhlT0lJkb54K9tpmj17tvTlX/bLtqSkhI0bN44BYP7+/qy4uNgkGSrytE4TANa+fXuWkZEhbUtMTGS1atViAFjLli1Zv379WH5+vrT9zJkzzMLCgikUCnbnzh2t/V6+fJnFxcXptOPRo0fsxRdfZADY559/rrWtoKCA1atXjwFg77zzDlOr1dK2S5cusTp16khtfbLzMnLkSKmD9+DBA61tK1asYABY48aNmUqlksqf7DQVFBSw+vXrMwBsz549Wvsor9M0d+5cBoB16NCB3b59W2vbjh07mFKpZLVq1WKZmZla28r7PSTEGPRJIqQaGNJpcnV11bmyY6hOnToxAOzSpUt6911ep8ne3p7du3dPZ38///wzA8DCw8O1yg3pNH311Vc624qKiqSrJ0lJSVL58ePHpSsUDx8+1Hnd/v37K91pKiwslK6q7N27V2d7fn6+1AnYvHlzlTM8zdM6TYIgsIsXL+q8btq0adLfzZOdEcYY69evHwPANmzYYHBbrl27xgCwdu3aaZVv3LhROq4lJSU6r1u9erXeTtPly5eZIAisbt26LCcnR+97RkREMABs3759UtmTnSbGGPvhhx+kzn/ZDpa+TtPDhw+Zra0ts7GxYSkpKXrfd9KkSQwAW7VqlVY5dZqIKdGYJkJqSPfu3eHs7FxhnZs3b2L16tV46623MG7cOIwdOxZjx47FgwcPAKDcsU3ladu2Lby8vHTKmzVrBgDljv2pSL9+/XTKrK2t0aBBA519/vHHHwCAXr16wdXVVed1ffr0gYuLS6Xe/+zZs8jLy4Orq6vettjZ2WHEiBEAgKioqCpnqCofHx+0aNFCp7xx48YAgDZt2sDDw6Pc7ffu3dPZplarcezYMXz88ceYNGkSXnvtNYwdOxaffvopAN3PieY4DB06VO/MyFdeeUVv2w8ePAjGGHr37g1HR0e9dTRrkJ04cULvdo2xY8fiueeew6VLl7Bhw4YK60ZFRaGwsBBdunRBvXr1qvS+hFQFLTlASA2paMFItVqNKVOm4Ntvv61wfZmcnJxKvaePj4/ecicnJwCocNCwKfapWdSwouy+vr7Iysoy+P01HRp/f/9y62hm/5XX+amOv5fylPdeDg4OFW7XdFKebMuNGzcwaNAgXLp0qdz3fPJz8rTj4OLiAmdnZ2RnZ2uV3759GwDw448/4scffyz3/YDHA9QrolQqsWjRIgwcOBDz5s3DyJEjYWNjo7eu5n2PHTumNbjbmPclpCqo00RIDbG1tS1325dffolvvvkGnp6eWL58OTp37ow6depIXyojR47E1q1bK71gn0Jh+ovLxuyzoi++p30pVofq+Hsx9
r0q25aXXnoJly5dQt++fTFr1iw899xzcHJygqWlJUpKSmBtbV3uayt7HERRBAAEBgaiVatWFbarQ4cOT237gAED0LlzZ5w4cQKrVq3CzJkz9dbTvG+jRo3QpUuXCvdJC8eS6kSdJkLM0Pbt2wEA3377Lfr376+z/caNG8+6SSahubVS0XTvO3fuGLXPhISEcutorlSUd2uHV1evXsWFCxfg4eGBX3/9FRYW2qf08j4nTzsO2dnZeq/2eXt7AwC6dOmC1atXG9/wMj777DMEBQVh8eLFmDBhgt46mvcNCAh4povFEvIkGtNEiBl69OgRgMe3qp506dIlxMfHP+MWmUZwcDAA4LfffkNmZqbO9kOHDuktr0jbtm3h4OCAR48eYe/evTrbCwsL8fPPPwMAwsLCjGi1+dJ8TurWravTYQKATZs26X2d5jjs2LFD79pPW7Zs0fu63r17AwD27t1rsluWXbt2Rb9+/ZCZmYnFixfrrdOtWzdYWVkhOjoaaWlpldq/ZsyWvpyEVBZ1mggxQ5qB2V9//bV0awIAUlNTMXr0aG6/AIKDg9GqVSvk5uZi6tSpKCkpkbbdu3cP77zzTqX3aWNjg8mTJwMA3nnnHa0rVaWlpZg+fTru378Pf39/vPTSS1UPYUaaNGkCpVKJixcv6iwGum/fPqxYsULv64YOHQovLy8kJibi/fff1/qMXb16FQsXLtT7utatW2PIkCFITk7G4MGD9V6pys/Px+bNm6XJCoZYtGgRFAoFVq1apdUWjTp16mDq1KnIz89Hv379cPHiRZ06xcXF2Lt3L65evapVXr9+fQCocMwXIYaiThMhZmju3LmwsrLC999/j4CAAAwfPhy9e/dGw4YNUVxcjEGDBtV0E40iCAI2bdoEV1dXbN68GQ0aNMDw4cPRr18/NGnSBK6urujUqRMAwMrKyuD9LliwAN26dcPNmzfRrFkz9OnTByNGjECjRo3w/fffo3bt2tixY0el9skDNzc3TJkyBWq1Gt26dUNoaChGjhyJNm3aoH///uWOEbKzs8OmTZtgY2ODzz//HAEBAXj55ZfRs2dPtGrVCkFBQdKA9Cf/ztatW4du3brh0KFDCAgIQPv27TF8+HAMGzYM7du3h6urK0aNGlWpK4YtWrTA6NGjUVhYWO44vSVLlmDkyJE4ffo0AgMD8cILL+Cll17CiBEj0LVrV9SuXRsDBgzQ6cgNGTIEwOPZqsOHD8f48eMxfvx4PHz40OD2EaJBnSZCzFCHDh1w9uxZ9O/fH/n5+di7dy9u3bqFqVOnIi4uTprVxaMWLVrgr7/+wquvvorS0lLs3r0bV65cwfTp0/H7779LVygq8/w8a2tr/Pbbb1izZg1atWqFmJgY/Prrr7C0tMTUqVNx/vx5tGnTproi1agVK1bgxx9/ROvWrfHXX3/h4MGDsLOzw88//4yPP/643NeFh4fj1KlTGDRoEB49eoTdu3cjJSUFn376KTZt2oT79+9DoVDoLA3h6OiII0eOYMuWLejevTuSkpLw66+/IjIyEoWFhXjllVfw66+/Vvp5hQsXLix39hwAWFhYYPPmzTh48CAGDhyItLQ07N27F4cPH8ajR4/Qr18/bNmyRbr1qPHxxx9j1qxZcHFxwe7du6WZf7m5uZVqHyEAILDKTr8hhJBqkpCQgEaNGsHR0RGPHj16prPayP8cP34cISEhaNmyJS5cuFDTzSHEbNAZiRDyTOXn5+sdX3Lnzh288sorEEURY8aMoQ5TNUtPT9c74/Cff/6RZrG99tprz7pZhJg1utJECHmmEhMT4e/vj4YNG6JJkyZwcnJCUlIS/v77bxQXF6NVq1Y4fvw417cgeRAdHY2wsDA899xzaNCgAWxtbZGQkIC///4boiiiR48eOHjwoN5ZeYT8W1GniRDyTOXl5WHBggWIjIxEUlISsrKyYGdnh4CAAAwZMgRTp06FnZ1dTTdT9u7du4dFixbhjz/+wN27d5GbmwtHR0c0b94cI0eOxIQJE6jDRMgTqNNECCGEEGIAGjRACCGEEGIAuvZaCaIo4t69e3B0dKyR52MRQgghxPQYY8jNzUXdunUrnIRCnaZKuHfvnvQMJEIIIYTIS3JysrSKvD7UaaoER0dHAI//Us1hZo+mZyy3K1+Uiy+Uiy+Uiy9yzGWOmXJycuDt7S19z5eHOk2VoDm4Tk5OZtFpKi0tRXR0NCIiIqSHUsoB5eIL5eIL5eKLHHOZc6andeJoIDghhBBCiAGo00QIIYQQYgDqNHFOrovPUS6+UC6+UC6+yDEXr5locctKyMnJgbOzM7Kzs81iTBMhhBBCqs7Q73e60sQxURSRlpYGURRruikmRbn4Qrn4Qrn4IsdcPGeiThPH1Go14uLioFara7opJkW5+EK5+EK5+CLHXDxnok4TIYQQQogBqNNECCGEEGKAKg1fv3PnDlJSUpCRkQE7Ozu4u7ujadOmsLGxMVX7SAUEQTCrFVVNhXLxhXLxhXLxRY65eM5U6dlzUVFRWL9+PY4dO4bU1FSd7ZaWlmjbti0GDRqEsWPHonbt2iZrbE2j2XOE/DswtRrixXiwRxkQXN2gaBkIQams6WYRQqqJod/vBneatm/fjnnz5uH69etgjMHb2xtt27ZFnTp14OrqisLCQjx69AjXrl3DuXPnUFxcDGtra4waNQoLFy6El5eXycLVFHPrNImiiOTkZHh7e1f4VGbeUC6+yC2XKiYKJV8vB8tIk8oENw9YTZ4Bi6CwGmyZacjteGlQLn6YYyZDv98Nuj3XsWNHnD59Gi+88AKWLVuGoUOHol69euXWLy0txfHjx7Fp0yZs374dP//8MzZu3IhBgwZVPgkpl1qtRnx8POrWrWs2HzxToFx8MedcjDEUFBQY/oK448CS+WAAyt44YBlpKF7wHorfmw90Cn7qbuzs7Mz21oM5H6+qoFz84DmTQZ0mKysrHD16FOHh4Qbt1NLSEt26dUO3bt2wYsUKLFu2DElJSVVqKCGEVFZ+fv5Tn1quoQBwuUsr1LW2gkJPh0dkDHfnz0Hz2PN42uoyubm5cHBwqHyDCSFmzaBO0/Hjx41+AxcXF3z88cdGv54QQoxVmatMXVwcUd/GutztCkGAt401urg4IiYr96nvS50mQuSHr+tiRIsgCHB3dzfb2wDGolx8MedcdnZ2Btf1tLY0Wb3KvO+zZs7HqyooFz94zkTPnqsEcxsITgipWKXGNF2MBz6Y8fR6nywHWgZWWMWcxzQRQnRV+7Pn0tPT8fnnn2Po0KHo0aMHwsPDdX66detW6f2eOXMGU6ZMQfPmzWFvbw8fHx8MGzYM169f16l75coV9OrVCw4ODnB1dcWrr76K9PR0nXqiKOLzzz+Hv78/bGxs8Pzzz2Pr1q1G5TYnarUaV69e5XIp+opQLr6Ycy5BEGBvb2/Qj127jhDcPCren3sd2LXr+NR9mXOHyZyPV1VQLn7wnMmoxS0vXLiA8PBwZGZmoqILVcacOD777DPExsZi6NCheP7553H//n2sXr0aL7zwAk6ePIkWLVoAAFJS
UhAcHAxnZ2csWrQIeXl5+OKLL3Dx4kWcPn0aVlZW0j7ff/99LFmyBBMmTEC7du2wZ88ejBw5EoIgYMSIEZX/CzAToiji2rVraNiwIZQyWkOGcvFFLrkEpRJWk2egeMF75daxmvQ29+s1yeV4PYly8YPnTEZ1mt555x08evQIH3zwAcaNG4f69eubLPiMGTOwZcsWrU7P8OHD0bJlSyxZsgSbNm0CACxatAj5+fn466+/4OPjAwBo3749evTogfXr12PixIkAgLt372LZsmWYPHkyVq9eDQAYP348QkJCMHPmTAwdOpS7g0YIqR4WQWHAvCW66zS514HVpLdlsU4TIcR4RnWa4uLiMHDgQCxcuNDU7UHnzp11yho3bozmzZvjypUrUtmuXbvQt29fqcMEAN27d0eTJk2wfft2qdO0Z88elJaWYtKkSVI9QRDw5ptvYuTIkYiLi0PXrl1NnoMQwieLoDAoOwejJP4szkUeQ+vwbrAKbMv9FSZCTK3S66D99zXZ2dnIyspCXl6e1gUSQ9XkmEGjOk1WVlZo2LChqdtSLsYYHjx4gObNmwN4fPUoLS0Nbdu21anbvn17HDx4UPrzuXPnYG9vj2bNmunU02wvr9NUXFyM4uJi6c85OTkAHi/eWVpaCgBQKBRQKpVQq9UQxf+t3qIpV6lUWrcwlUolFApFueWa/WpYWDw+RCqVSqdcEATUr19f676wpaUlRFHUKhMEARYWFuWWl9f2msgEPL7fXTaXHDKpVCqtXHLJBPzveCkUCtlkAgDWvBWsFVZQtGgBlSgC/20Pz5k07+vt7S2b4wQ8PkcA0DpvyCGTpu2aXOaWqbi42OB10EwpLy8PNjY2Js30ZLbyGNVpCgkJwdmzZ415qVE2b96Mu3fvSle2NM+80/doFi8vLzx69Eh6jEtqairq1Kmj0yvVvPbevXvlvu/ixYuxYMECnfIjR45IU4p9fHzQunVrXLhwQWsBz4CAADRt2hSnT5/WGpweGBgIX19fHD9+HLm5/1vrpVOnTvDw8MCRI0e0fnnCwsJga2ur1REEgIiICBQWFiIlJQUpKSkAHv/y9enTBxkZGYiLi5PqOjo6Ijw8HMnJyYiPj5fK3d3d0blzZ9y4cQPXrl2Tyms6U1RUFIDH49bklgkA7t+/L7tMFhYWUCqVSEtLk1WmPn36yDJT/fr1cfjwYVllyszM1DofyiFT2XNESkqK2WXSXHyoCabOZOgVM6OWHLh16xY6duyI2bNn4913363syyvl6tWr6NChA5o3b46YmBgolUrExMQgODgY27Ztw7Bhw7Tqf/TRR/j444+RmZkJFxcXdOvWDampqbh8+bJWPVEUoVQqMX36dKxcuVLve+u70uTt7Y2MjAxpSmJN/utErVbj/PnzaN68uTQuSw5XMIqLi3Hp0iUplxwyaa40aXJp/pXEeyYAUq7AwEAIgiCLTJpcV65ckSafyCGT5n3Pnz+P5557Tjpv8J7J0tISpaWluHDhgnTekEMmURRRUlIinTcsLCzMKpNSqURBQUGlMuXm5koXLe7cuQNnZ+dKZ3J0dIQoiibNlJOTAzc3N9M8e+7111/XKWvRogVmz56Nb775BoGBgXrfRBAE/Pjjj4a8hV6af5E7Oztj586d0i+4ra0tAGh1aDSKioq06tja2hpUTx9ra2tYW+uuEGxpaSldDtZQKpV6B5RrPjyGlj+534rKGWNISUnB888/r7VdoVDofZ5PeeXltb0mMmne98lcvGfSlGtyVdR23jIB0DpecskEAElJSWjRooXe+rxmKi0tRXJyMlq2bKmzjddMGpU5H/KQSdMR0OTSvJc5Zar0yvdqNYJcHOFpbQnnpAQ4depq1HhBU2cq77jo7MeQSuvXry932+3bt3H79m2926rSacrOzkbv3r2RlZWFmJgY1K1bV9qm6aVqbtOVlZqaCldXV6mz4+XlhaioKDDGtG7RaV5bdr+EEEIIqR6qmChg9TIcavPfMcbzZ6HQzQNWk2dwMzPVoE5TQkJCdbdDS1FREfr164fr16/j6NGjeO6557S216tXD+7u7nrHVZ0+fRqBgYHSnwMDA/HDDz/gypUrWvs5deqUtJ0QQggh1UcVE6V3DTSWkfa4fN4SLjpOBnWafH19q7sdErVajeHDhyMuLg579uxBp06d9NYbMmQINmzYgOTkZHh7ewMAjh07huvXr+Ptt9+W6g0YMABvv/021qxZI63TxBjDN998g3r16uld4oAXCoUCAQEBei/T8oxy8YVy8YVy8UUOuZhajZKvl1dYp2TNCig7B5v90h4GDwRft24dwsPDq70D9dZbb+HLL79Ev379dAZ5A8CoUaMAAMnJyWjdujVcXFwwffp05OXlYenSpahfvz7OnDmjNRZp1qxZWLp0KSZOnIh27dph9+7dOHDgADZv3oyRI0ca3DZ69hwhhBBSOer4v1D07qSn1rP5Yg2UgW2eQYt0Gfr9bnCnSaFQQBAE+Pr6IiwsTPqpV6+eyRoNAKGhofjjjz/K3V62uZcuXcKMGTPw559/wsrKCn369MGyZctQp04drdeIoojPPvsM3377LVJTU9G4cWPMmTMHr7zySqXaZm6dJpVKhdOnT6N9+/blDnLjEeXiC+XiC+XiixxyqSIPo3jRR0+tZz13ISzCez6DFuky9Pvd4CMwbdo0REdH4+LFi1i3bp00OLxhw4YICwtDeHg4QkNDdToslRUdHW1w3ebNm2utNVIehUKBOXPmYM6cOVVomflhjCE9Pb3C5//xiHLxhXLxhXLxRQ65BFc3k9arSQZ3mjRrGWVmZuKPP/5AVFQUoqOj8c8//+DmzZv44YcfAABNmzaVrkKFhoaidu3a1dJwQgghhJg/RctACG4eWs9zfJLgXgeKloHPrlFGqvS1vlq1amHgwIEYOHAgAODRo0eIjo6WOlGXL1/G1atXsXbtWr2LYRFCCCGEf5V69tz4ScCS+WAA9D01jo17EwX/XT/xaWry2XNGrQhenpKSEuzduxcLFy7EP//8o7M6MO/MbUyTKIrS7EGeZ1Y8iXLxhXLxhXLxxZxz5efnV2pxy/7utfB5Ex/Ut/nfRK3komLMvp6EvemZBu8nLy8P9vb2lWrr05h8ILg+KpUKp06dQlRUFKKiohAXF4fi4mIwxtC0aVOEhIRg7dq1xu7e7Jhbp4kQQgipKZXtNAGAAkCX/64Ifr+4FLFZuRCf+iptNdlpqtTtOVEUcebMGamTFBsbi8LCQgDAc889h3HjxiEkJATBwcHw8PCoWgLyVCqVCsePH0dwcDC3syr0oVx8oVx8oVx8MedcdnZ2yMvLq9RrGGPIyclBXFwcunXrZvDjS55835pi8BGIiIhAbGws8vLyIAgCWrZsifHjx0udJBrw/ewxxpCbm8v1rAp9KBdfKBdfKBdfzDmXIAhGXfGxtraGhYUF7O3tjeo01SSDO02//fYbFAoFBg8ejI8++ggtW7asznYRQgghhJgVg0eV9e/fH87Ozti1axcCAwPRqFEjTJgwAZs3b0ZKSkp1tpEQQgghpMZVaiA
4Ywzx8fHS8gIxMTHIzs6GIAjw8/NDaGio9KN5HpycmNtAcFEUkZGRATc3N7ObVVEVlIsvlIsvlIsvcsxljpmeyew5URRx7tw5REZGIjo6Gn/++Sdyc3OlTlRYWJi06KUcmFuniRBCCCFVZ+j3e5W6eAqFAm3atMHMmTNx4MABZGZmYseOHWjevDkSEhKwbt26quyePEVpaSkOHDgguwVEKRdfKBdfKBdf5JiL50xVnr+Ympoq3a6LiorC7du3pW1KpbKquydPoVKparoJ1YJy8YVy8YVy8UWOuXjNVOlO04MHD7Qem3Ljxg0Aj8c7CYKAVq1aSc+eCw4ONnmDCSGEEEJqgsGdpkmTJiE6OhrXrl0D8L9OUosWLRAaGoqwsDCEhISgVq1a1dZYQgghhJCaYvBAcM0I92bNmklXkkJDQ/9Vi1qa20BwzaJnjo6ONfbwwupAufhCufhCufgix1zmmMnkj1HZunUrQkNDUadOHZM0kJiGra1tTTehWlAuvlAuvlAuvsgxF6+ZDJ49N3z4cOowmRmVSoWDBw9yO6CuPJSLL5SLL5SLL3LMxXMmgzpNSUlJVX6ju3fvVnkfhBBCCCE1xaBOU+PGjTF58mQkJCRUauelpaXYunUrmjdvjh9//NGoBhJCCCGEmAODOk2fffYZtm3bhkaNGiEkJASrVq3CmTNn9C5MlZKSgl27duGNN96Al5cXRo0aBV9fX4wcOdLkjSeEEEIIeVYMnj2XlZWF5cuX48cff0RqaioEQYBCoYCLiwtcXFxQVFSER48eoaio6PGOBQE9e/bEO++8g/Dw8GoN8ayY4+w5lUoFCwsLs5mBYAqUiy+Uiy+Uiy9yzGWOmart2XNqtRqHDh3CsWPHcOLECaSkpODhw4ewtbWFu7s7WrZsiZCQEAwYMAC+vr5VDmJOzLHTZG7TNk2BcvGFcvGFcvFFjrnMMVO1PXtOqVSib9++WLFiBU6dOoW7d++iqKgImZmZuH79Onbt2oVp06bJrsNkjlQqFaKioricgVARysUXysUXysUXOebiOVOVHthLCCGEEPJvQZ0mQgghhBADUKeJcxYWlX7mMhcoF18oF18oF1/kmIvXTJUeCP5vZm4DwQkhhBBSddU2EJyYD1EUkZaWBlEUa7opJkW5+EK5+EK5+CLHXDxnok4Tx9RqNeLi4qBWq2u6KSZFufhCufhCufgix1w8Z6JOEyGEEEKIAajTRAghhBBigCoNXz99+jTOnDmDrKwsvZfZBEHAhx9+WJW3IBUQBMGsVlQ1FcrFF8rFF8rFFznm4jmTUbPnHj16hIEDByI2NhYVvVwQBC7vWZaHZs8RQggh8lOts+dmzJiBP//8EyEhIVi3bh1+//13REVF6fxERkYa1fi8vDzMmzcPvXr1gqurKwRBwPr16/XW3b59Ozp27AgXFxfUrl0bISEhOHDggE49URTx+eefw9/fHzY2Nnj++eexdetWo9pnLkRRxJ07d7icgVARysUXysUXysUXOebiOZNRt+f279+P9u3b49ixY9VyeS0jIwMLFy6Ej48PWrVqhejoaL31Vq1ahWnTpqFPnz5YsmQJioqKsH79evTt2xe7du3C4MGDpbrvv/8+lixZggkTJqBdu3bYs2cPRo4cCUEQMGLECJNneBbUajXi4+NRt25dKBTyGZ5GufhCufhCufgix1w8ZzKq01RYWIjg4OBqux/p5eWF1NRUeHp64uzZs2jXrp3eeqtWrUK7du2wb98+qS2vv/466tWrhw0bNkidprt372LZsmWYPHkyVq9eDQAYP348QkJCMHPmTAwdOhRKpbJashBCCCFEHozq4gUGBiIxMdHETfkfa2treHp6PrVeTk4OPDw8tDpvTk5OcHBwgK2trVS2Z88elJaWYtKkSVKZIAh48803kZKSgri4ONMGIIQQQojsGHWlad68eejfvz9OnjyJjh07mrpNBgsNDcXOnTuxatUq9OvXD0VFRVi1ahWys7Mxffp0qd65c+dgb2+PZs2aab2+ffv20vauXbvq7L+4uBjFxcXSn3NycgAApaWlKC0tBQAoFAoolUqo1Wqt+7OacpVKpTVYXqlUQqFQlFuu2a+G5vk8KpVKb3nt2rW1tllaWkIURa0B+IIgwMLCotzy8tpeU5lUKpVWLrlkKptLLpk0/61duzYEQZBNJs3/u7u7gzGmtR+eM2na6ebmplXOeyZLS0swxrTOG3LIJIqi1nmDMlVfpiezlcegTtPGjRt1yvr06YOQkBC88soreOGFF8odbT569GiDGmKMr776ChkZGZg2bRqmTZsGAHBzc8OxY8fQqVMnqV5qairq1KmjczvRy8sLAHDv3j29+1+8eDEWLFigU37kyBHY2dkBAHx8fNC6dWtcuHABSUlJUp2AgAA0bdoUp0+fRnp6ulQeGBgIX19fHD9+HLm5uVJ5p06d4OHhgSNHjmh90MLCwmBra4uDBw9qtSEiIgKFhYV4+PAhjhw5AuDxB7VPnz7IyMjQunrm6OiI8PBwJCcnIz4+Xip3d3dH586dcePGDVy7dk0qr+lMUVFR0t+z3DIBQGRkpOwyWVhYwMLCAmlpabLK1KdPH1lmaty4sXTekEumrKwsrfOhHDKVPUccOXKEMlVjpoKCAhjCoCUHFAqFTofjyZfp226KJQc0Y5rWrVuHsWPHam3Ly8vD7NmzkZ+fj759+yI3NxcrVqxAeno6YmJi0KhRIwBAt27dkJqaisuXL2u9XhRFKJVKTJ8+HStXrtR5b31Xmry9vZGRkSF1EmuyJ69Wq3Ht2jU0bNhQGpMlhysYxcXFuHXrlpRLDplUKhXUarWUy8bGRhaZAEi5mjZtqvM7z2smTa7ExEQ0bNhQqy7PmTTve/36dfj7+0vnDd4zWVpaorS0FNevX5fOG3LIJIoiSkpKpPOG5h8nlMn0mXJycuDm5vbUJQcMutK0bt06Q6o9c0OHDoWFhQX27dsnlQ0YMACNGzfG+++/j23btgEAbG1ttTo/GkVFRdJ2faytrWFtba1TbmlpCUtLS60ypVKpdzC55sNjaPmT+62onDGGmzdvokmTJlrbFQqF3hkJ5ZWX1/aayKR53ydz8Z5JU67JVVHbecsEQOt4ySUTAOkfJfrq85pJ07lo1KiRzjZeM2lU5nzIQyZNR0CTS/NelMn0mcrLoLMfQyqNGTPGoJ09S7dv38Zvv/2G7777Tqvc1dUVXbt2RWxsrFTm5eWFqKgo6eqXRmpqKgCgbt26z6bRhBBCCOEWXwsklPHgwQMA0Hv7r7S0VOuyYGBgIAoKCnDlyhWteqdOnZK2E0IIIYRUxKhO0/79+zF48OByB1Dfu3cPgwcPxqFDh6rUuIo0atQICoUC27Zt07pPmZKSgpiYGLRu3VoqGzBgACwtLbFmzRqpjDGGb775BvXq1UPnzp2rrZ3VSaFQwMfHh7vFwZ6GcvGFcvGFcvFFjrl4zmTUs+d69+6Ne/fu4fz58+XWad26NerVq4f9+/cb1bDVq1cjKysL9+7dw9q1azF48GCpIzR16lQ4OztjwoQJ+OGHHx
AWFobBgwcjNzcXa9asQWpqKiIjIxEcHCztb9asWVi6dCkmTpyIdu3aYffu3Thw4AA2b96MkSNHGtQmevYcIYQQIj8Gf78zI3h5ebEJEyZUWOc///kPq1evnjG7Z4wx5uvrywDo/UlISGCMMVZaWspWrVrFAgMDmYODA3NwcGBhYWEsMjJSZ39qtZotWrSI+fr6MisrK9a8eXO2adOmSrUpOzubAWDZ2dlG5zIllUrF/v77b6ZSqWq6KSZFufhCufhCufgix1zmmMnQ73ejro09evQIHh4eFdZxc3NDRkaGMbsHACQmJoIxpvfHz88PwOPR8FOmTMG5c+eQm5uL3NxcREZGIiwsTGd/CoUCc+bMQWJiIoqLi/HPP//glVdeMbp95kAURSQlJXH50MOKUC6+UC6+UC6+yDEXz5mM6jS5u7trLTalz7Vr1+Dq6mpUowghhBBCzI1Rnabg4GDs27cPFy5c0Lv9/Pnz2Lt3L0JCQqrUOEIIIYQQc2FUp2n27NkAgK5du2LhwoWIi4tDUlIS4uLisGDBAgQFBUm3w0j1USgUCAgI4HIGQkUoF18oF18oF1/kmIvnTEbNngOAXbt2YcyYMSgsLNQqZ4zBwcEBGzduxMCBA03RRrNBs+cIIYQQ+TH0+93obt6QIUNw+/ZtLF68GIMHD0a3bt0wZMgQfP7557h165bsOkzmSKVS4cSJEzrP9+Ed5eIL5eIL5eKLHHPxnMmgx6iUx8PDA7NmzTJVW0glMcaQnp6u8/Bk3lEuvlAuvlAuvsgxF8+Z+LuhSAghhBBSA6rUadq8eTN69OgBd3d3WFtbw93dHT169MCWLVtM1T5CCCGEELNg1O05tVqNYcOGYffu3WCMwcbGBnXr1sWDBw9w7NgxREZGYteuXdixYweXo+N5oVQqERgYCKVSWdNNMSnKxRfKxRfKxRc55uI5k1E9mq+++gq//vorunTpgtjYWBQUFCAhIQEFBQU4ceIEunbtit27d2PVqlWmbi8pQ6FQwNfXV3YdU8rFF8rFF8rFFznm4jmTUS3esGEDmjRpgmPHjqFTp05a2zp27IijR4+iSZMmWLdunUkaSfRTqVSIjIzkcgZCRSgXXygXXygXX+SYi+dMRnWarl+/jv79+8PS0lLvdktLS/Tr1w/Xr1+vUuNIxRhjyM3N5XIGQkUoF18oF18oF1/kmIvnTEZ1mqysrJCfn19hnfz8fFhZWRnVKEIIIYQQc2NUp6l169bYvn077t27p3d7amoqtm/fjhdeeKFKjSOEEEIIMRdGPUZl3759GDBgADw9PfHOO+8gJCQEderUwYMHDxAdHY3ly5fjwYMH2LNnD/r27Vsd7a4R5vYYFVEUkZGRATc3Ny4H1JWHcvGFcvGFcvFFjrnMMZOh3+9GP3tu+fLleO+996BWq7XKGWOwsLDAZ599hrffftuYXZstc+s0EUIIIaTqqv3ZczNmzMDVq1cxf/58DBw4EOHh4Rg4cCAWLlyIq1evyq7DZI5KS0tx4MABlJaW1nRTTIpy8YVy8YVy8UWOuXjOVKVnzzVo0AAffvihqdpCjMDjlE1DUC6+UC6+UC6+yDEXr5lMcjNRpVIhMzOT278EQgghhJCnMbrTpFarsWLFCrRq1Qo2NjZwc3ODjY0NWrVqhZUrV1IHihBCCCGyYtRA8Ly8PPTs2RMnT56EQqGAt7e3NHsuOTkZoiiiU6dOOHz4MOzt7auj3TXC3AaCaxYIc3R0hCAINd0ck6FcfKFcfKFcfJFjLnPMVK0DwT/66CPExcXh5Zdfxq1bt3D79m3ExcXh9u3buHXrFkaMGIETJ07go48+MjoAMYytrW1NN6FaUC6+UC6+UC6+yDEXr5mM6jRt374dbdu2xaZNm+Dj46O1zcfHB5s3b0abNm2wbds2kzSS6KdSqXDw4EHZ3QqlXHyhXHyhXHyRYy6eMxk1e+7hw4cYPXp0hXW6d++OL7/80qhGEWLOGGMoKCio9Guys7ORlZWFvLw8ox4xZGdnZzaXsgkh5N/IqE5T48aNkZaWVmGd9PR0NGrUyKhGEWLOCgoK4ODg8MzfNy8vT1ZjBAkhhDdG3Z6bPn06tm3bhkuXLundfvHiRfz888946623qtI2QgghhBCzYdTsuePHj2PZsmU4cuQIxowZg65du0qz52JiYrBx40b07NkTM2bM0HltcHCwSRpeE8xx9pxKpYKFhYWsbtuYey5jbs/l5+bgpWYB8LS2xJqt22Dbpj2gVFZqH+Z6e87cj5exKBdfKBc/zDFTtT57TqFQQBAEaF5aNrS+srKefFYdT8yx02Ru0zZNQW65VDFRKF69DHiYLpUJbh6wmjwDFkFhNdgy05Db8dKgXHyhXPwwx0yGfr8bNabpo48+Mpug/2YqlQpRUVGIiIiApaVlTTfHZOSUSxUTheIF7+mUs4y0x+XzlnDfcZLT8SqLcvHFnHMZc3Va87rs7GwcPXoU/fv3r/QEEnO9Om3Ox+ppjOo0zZ8/38TNIER+mFqNkq+XV1inZM0KKDsHQ6jkrTpCCD9o8oh8mOTZc4QQXeLFeLCMimeZsvQHEC/GP5sGEUIIqRKjrjRpnDt3Dlu3bsXVq1dRUFCAo0ePAgDu3LmDU6dOoXv37nB1dTVJQ4l+FhZVOoRmSw652KMMk9YzZ3I4XvpQLr6Yay47Ozvk5eVV+nX5+fmoU6cOACAlJQUuLi6Vfl9zZa7H6mmMGggOALNmzcKyZcu0Bn5rBnknJiaiUaNGWLZsGaZPn2661tYwcxsITsybOv4vFL076an1bL5YA2Vgm2fQIkIIT/Lz86XbenSrrXpV67Pn1q1bhy+++AJ9+/bFhQsXMGfOHK3tfn5+aN++Pfbu3WvM7pGXl4d58+ahV69ecHV1hSAIWL9+vd66oihi7dq1CAwMhK2tLWrXro3w8HCcP39ep97nn38Of39/2NjY4Pnnn8fWrVuNap+5EEURaWlpEEWxpptiUnLJpWgZCMHNo8I6gnsdKFoGPpsGVRO5HK8nUS6+yDKXWo0gF0cMreMK8cLfYBzPPi+L52NlVKdpzZo1aNasGXbt2oUWLVroHdHftGlT3Lhxw6hGZWRkYOHChbhy5QpatWpVYd3XX38d06ZNQ5s2bbBq1Sp89NFH8PHx0Vmx/P3338fs2bPRo0cPrFq1Cj4+Phg5ciR+/vlno9poDtRqNeLi4rhexkEfueQSlEpYTdZdq6wsq0lvcz8IXC7H60mUiy9yy6WKiQImjMShNs2wrkUjKD58F4WvDHxczjmej5VRNxUvX76MCRMmVHhPsk6dOk991Ep5vLy8kJqaCk9PT5w9exbt2rXTW2/79u3YsGEDfvnlFwwaNKjc/d29exfLli3D5MmTsXr1agDA+PHjERISgpkzZ2Lo0KFQcv7FRZ6dSk0ffqE98N58iN+tguLRw/+Vu7kD4yej+IX2KM7PN2hX5jp9mBBiWv+GpUp4ZVSnycLCAiUlJRXWuXfvntFTLK2treHp6fnUesuXL0f79u0xaNAgiKKIwsJCvfd89+zZg9LSUkya9L/xJYIg4
M0338TIkSMRFxeHrl27GtVW8u9jzPRhBYAuLo7wtLbE/eJSxGadhrjtQKX2QWMaCJE/WqrEvBnVaWrZsiUiIyOhVqv1XqHRzKRr06b6Brfm5OTg9OnTmDRpEubOnYtVq1YhLy8P/v7+WLJkCYYNGybVPXfuHOzt7dGsWTOtfbRv317arq/TVFxcjOLiYq33BIDS0lKUlpYCeLw6ulKphFqt1ro/qylXqVQoO9ZeqVRCoVCUW67Zr4bmap5KpdJb7uDgoLXN0tISoihqXfYUBAEWFhbllpfX9prKpFKptHKZWyZjiABisnKNeq2GWq3W+rs0h+Ok+a+DgwMEQTCr41SVTJr/d3R0BGNMaz88Z9K088nzBu+ZLC0twRjTysVrJvHC3wYtVVISfxaK51/gIpOG5lxe9hxvLsfpyWzlMarT9Prrr2P8+PF44403pNtdGjk5ORg/fjzu37+PL7/80pjdG+TWrVtgjOHnn3+GhYUFPv/8czg7O+PLL7/EiBEj4OTkhF69egEAUlNTUadOHZ1bG15eXgAeXxXTZ/HixViwYIFO+ZEjR6SpnD4+PmjdujUuXLiApKQkqU5AQACaNm2K06dPIz39f4/PCAwMhK+vL44fP47c3P99iXbq1AkeHh44cuSI1gctLCwMtra2OHjwoFYbIiIiUFhYiLy8PBw5cgTA4w9qnz59kJGRgbi4OKmuo6MjwsPDkZycjPj4eKnc3d0dnTt3xo0bN3Dt2jWpvKYzRUVFSX/P5pjJ3d0dO3fu1MoUHBwMW1tbHD58WCtTz549UVhYiD/++EPqgNvb26N3795IS0vD6dOnpboODg4IDQ1FUlISLly4IJW7ubmhY8eOSEpKwvXr183uOAGPP3sWFhZIS0szm+Nkikx9+vSRZaaWLVtK5w25ZMrKytI6H/Kaqc71S2iJpzsXeQwPUu5zkQnQ//105MgRszlOhg65MHrJAc0gagcHB7i4uODu3bto06YNrly5gvz8fIwdOxb/93//Z8yutWjGNK1btw5jx46VymNiYqSH/548eRIdOnQAAOlqU0BAAP78808AQLdu3ZCamorLly9r7VsURSiVSkyfPh0rV67UeW99V5q8vb2RkZEhTUmsyZ68KIq4c+cO6tWrB4Xi8Zh+c7sqY8y/TkpKSnD37l0plxwyqVQqiKIo5bK2tpZFJgBSLj8/PwCQRSZNrvv376NevXpa++Y5k2Y/SUlJ8PLyks4bvGeytLSESqVCUlKSdN7gNZN44W+oZk/D01h89hW3V5pKS0ulc6FSqTSL45STkwM3N7fqefYcAGzZsgVhYWFYvXo1/vnnHzDGcPbsWTRr1gzTpk3Df/7zH2N3bRBbW1sAgL+/v9RhAh7/a71fv37YtGmT9BRlW1tbrc6PRlFRkda+nmRtbQ1ra2udcktLS53n5SiVSr23bsobLF9eeXnP4dFXLooiLl68CG9vb63tCoVCOhmWVV55eW2viUyadj6Zi/dMlpaWKC0tlXJV1HaeMgHQymVpaSmLTMDjXPHx8ahbt67e+jxmAh7nOn/+POrVq6ezjddMwOMJGpU5Hz7LTBVNHtEZH9y4GVDbXesB3zrc3KFq3Awo81p933GaOyLmdJw0x0NzrDTvVdPHydBn4FVpSc4JEyZgwoQJKCwsRGZmJpycnJ7Z83Xq1q0LANJqqWV5eHigtLQU+fn5cHZ2hpeXF6KiosAY07pFl5qaqrUvQgghxNQqO3mkv3stbGrZCACgKPOdJf73SsmoyBPY6+z81P3Q5BHTM8mz52xtbVG3bt1n+kDCunXrwtPTE3fv3tXZdu/ePdjY2MDR0RHA43uaBQUFuHLlila9U6dOSdsJIYQQc7A3PROjLt7EvWLtq1B3i0sw6uJN7E3PrKGWET4f/vJfw4cPx5dffonff/8dPXr0APB4Ycw9e/YgPDxcutQ3YMAAvP3221izZo00cJ0xhm+++Qb16tVD586dayxDVQiCAHd3d9mt3UO5+EK5+EK5nj1jnz3HVCrknT2FO+f+RkDHTvB+vjW2VGIGr7k+e86cj9XTGD0QvLqtXr0aWVlZuHfvHtauXYvBgwejdevWAICpU6fC2dkZDx48QOvWrZGXl4cZM2bA2dkZ33zzDZKTkxEXF6e1mvisWbOwdOlSTJw4Ee3atcPu3btx4MABbN68GSNHjjSoTfTsOUIIIUR+DP5+Z2bK19eXAdD7k5CQINW7desWGzRoEHNycmK2trYsPDycnT59Wmd/arWaLVq0iPn6+jIrKyvWvHlztmnTpkq1KTs7mwFg2dnZVY1nEiqVil25coWpVKqabopJUS6+UC6+UC6+yDGXOWYy9PvdJGOaqkNiYiIYY3p/NFObAaBBgwb45ZdfkJ2djYKCAhw7dkzvY1cUCgXmzJmDxMREFBcX459//sErr7zyDBOZniiKuHbtGpcPPawI5eIL5eIL5eKLHHPxnMlsO02EEEIIIebEqE7Txo0bdVY+JoQQQgiRM6M6TePGjcNvv/1m6raQSlIoFPDx8dG7IBjPKBdfKBdfKBdf5JiL50xGzZ7z8fHBgAEDsGrVqupok9mi2XOEEEKI/Bj6/W5UN69///74/fff9S7bTp4dtVqNc+fOaT2vRw4oF18oF18oF1/kmIvnTEZ1mj799FPY29tj8ODBuHTpkqnbRAwkiiKSkpK4nIFQEcrFF8rFF8rFFznm4jmTUSuCt27dGsXFxYiPj8dvv/0GGxsbeHh46KzuKQgCbt26ZZKGEkIIIYTUJKM6TaIowsrKCj4+PlrlTw6PMmK4FCGEEEKIWTKq05SYmGjiZhBjKBQKBAQEmO0MBMYYCgoKKv2a3NxcuLm5oaCgABYWlf+I2tnZmeUzjcz9eBmLcvGFcvFFjrl4zmS2z54zRzR7rnLy8/Ph4ODwzN83Ly8P9vb2z/x9CSGE8KlaZ8+VdfnyZfzyyy/46aefqrorUkkqlQonTpyASqWq6aYQA8j1eFEuvlAuvsgxF8+ZjLo9BwBnzpzBhAkTcPHiRans1VdfBQAcP34cvXr1ws8//4z+/ftXvZUyZ+xtrOzsbNy4cQPNmjWDlZVVpd+3um9j2dnZIS8vr1Kvyc/PR506dQAAKSkpcHFxMep9zRFjDOnp6bIb60e5+EK5+CLHXDxnMqrTdOnSJYSHh0OhUODtt9/G1atXcejQIWl7UFAQ3NzcsGPHDuo0GaCgoECWt7EEQajS/u3t7ek2GyGEELNh1O25efPmAQD++usvfPHFF2jXrp3WdkEQ0KlTJ5w5c6bqLSSEEEIIMQNGXWn6448/MGTIEDRq1KjcOj4+PvR8OgNV9TZWamoqHB0djXpfc6ZUKmu6CSalVCoRGBhIuThBufhCufjBcyajOk25ubnw8PCosE5hYSGXS6TXhKrexnJ0dJTlbSwep6NWRKFQwNfXt6abYXKUiy+Uiy9yzMVzJqO+lby9vbUGgOvz999/o2HDhkY1ilQOjzMQyqVWI8jFEUPruEId/xeYjDreKpUKkZGR8jpeoFy8oVx8kWMunjMZ1Wnq27cvjhw5gqNH
j+rdvn37dpw8eRIDBw6sStuIgXicgaCPKiYKmDASh9o0w7oWjaCcNxOFrwx8XC4DmoU75XK8NCgXXygXX+SYi+dMRnWa5s6di7p16yIiIgITJkzA2bNnAQBr1qzBq6++ipEjR8LPzw8zZswwaWNJGWWuyAj/nOf+iowqJgrFC94DHqZrlbOMNBQveE82HSdCCCH8MmpMk7u7O/744w+8+uqr+PHHH6XyKVOmAAA6dOiArVu3wtnZ2TStJFpUMVHA6mU41KbZ44L5s1Do5gGryTNgERRWs40zAlOrUfL18grrlKxZAWXnYAgcDhwkhBAiD1V+jEp8fDxOnjyJR48ewcnJCR06dNBZgkAuzOExKtIVmXJYz1vCXcdJHf8Xit6d9NR6Nl+sgTKwzTNoUfUQRREZGRlwc3OT1SB3ysUXysUXOeYyx0yGfr8bvSK4RmBgIAIDA6u6G2IAuV6RYY8yTFrPXCkUiqfOOuUR5eIL5eKLHHPxnKnKXbyHDx8iMjISv/76KyIjI/Hw4UNTtIvoIV6MB8tIq7AOS38A8WL8s2mQiQiubiatZ65KS0tx4MABlJaW1nRTTIpy8YOp1Sj+6xTOLPsMxX+d4n4sZFlyPF6APHPxnMnoK02JiYmYPn06Dhw4oDUCXhAE9O3bFytXroSfn58p2kj+S65XZBQtAyG4eVTYIRTc60DRMvDZNaqa8DjF1hCUy/ypYqJQ8vVysIw0PAdAdegXqDkeC6mPnI5XWXLMxWsmozpNt27dQpcuXZCWlobGjRujS5cuqFOnDh48eIATJ05g7969OHnyJE6cOIEGDRqYus3/WnK9IiMolbCaPKPCsVpWk97m6pYjIeakvLGQmtmp4HAsJCE1wahO0+zZs5Geno5vvvkGEyZMgCAI0jbGGL777jtMmjQJs2fPxo4dO0zWWLlijKGgoODpFRs0Bmq760zL1+LmjqIGjYH8/Kfuzs7OTuvY1SSLoDBg3hIUr16mlU9wrwOrSW/TCZ2QMjQDaQ2rrIbtqqUQAOj7bWcACld/gcLGTQFFxf8wMaeBu4TUBKNmz9WqVQuhoaH49ddfy60zYMAAHD9+HJmZmVVqoDmprtlz+fn5cHBwMKhuf/da2NTy8TP/FGU6POJ/D+OoizexN92wv/O8vDyze/xKfk4OevvWh6e1Jf7v1z2wb99JNleYNAu6OTo6mk1n1RQo17OXlpYmPXvyaYJcHP+3PEkFev91BTFZuRXWefDggdkO4DXn41UVcsxljpmqdfacWq1G8+bNK6zTokULREXRgoSmtjc9E6Mu3sTnTXxQ38ZaKr9bXILZ15MM7jA9CwZfQSsjv6hIOnGvatAYQlFRpd/XnK6gPcnW1ramm1AtKJf58rS2NGk9cyaH46WPHHPxmsmoTtMLL7yAS5cuVVjn0qVLaNu2rVGN+rexs7NDXl5epV7DVCrknTmJS3/G4IXu3eH9/AvYUskrMnZ2dpWqX1kFBQUGX0HTx9PT06jXmeMVNODxwMeDBw8iIiIClpb8f0FpUK5nz83NDQ8ePDCoruLyReCTuU+tt+bn7Vj9XMunvq+5MufjVRVyzMVzJqM6TZ9++im6deuGH374AePHj9fZ/t133+Hw4cM4duxYlRv4byAIglFf8tYh4cjNL4J9hy7cffAIIcarzDo3rHYoCg2YnVo7KFQ2t8IJqS5GdZqOHTuGsLAw/Oc//8GyZcu0Zs/Fxsbi+vXr6NmzJ44ePar1UF9BEPDhhx+arPHEvBl1BY0xZGdn4+jRo+jfvz+srKyMel9CyGM0O5UQ0zGq0zR//nzp/69du4Zr167p1Pntt9/w22+/aZVRp+nfxegraNbWcHFxgYODA11BI8QENLNTNes0adDsVEIqx6jZc3/88YfRbxgSEmL0a2uaOTx7rizGGFQqFSwsLMx24LMxKBdfKBc/mFoN9cVzUKWnwcLdA8qWrWVzhUmOxwuQZy5zzFSts+equ+OTl5eHpUuX4tSpUzh9+jQyMzOxbt06jB07ttzXlJaWolWrVrhy5QqWLl2Kd999V2u7KIr44osvsHbtWqSmpqJJkyaYM2cOXn755WrNUt0KCwvh6OhY080wOcrFF8rFB0GphLJVGxTk5sLajKZ7m4rcjpeGHHPxmsksVynLyMjAwoULceXKFbRq1cqg16xatQpJSUnlbn///fcxe/Zs9OjRA6tWrYKPjw9GjhyJn3/+2VTNfuZUKhWioqK4XY6+PJSLL5SLL5SLL3LMxXMms+w0eXl5ITU1FXfu3MHSpUufWj8tLQ0LFy7E7Nmz9W6/e/culi1bhsmTJ+O7777DhAkTsG/fPgQFBWHmzJlQy+ihlYQQQgipHmbZabK2tq7UGj3vvfceAgICMGrUKL3b9+zZg9LSUkyaNEkqEwQBb775JlJSUhAXF1flNhNCCCFE3syy01QZp0+fxoYNG7By5cpy78+fO3cO9vb2aNZM+1EC7du3l7bzysLCqGFpZo9y8YVy8YVy8UWOuXjNxGer/4sxhqlTp2L48OHo1KkTEhMT9dZLTU1FnTp1dDpVXl5eAIB79+7pfV1xcTGKi4ulP+fk5AB4POi8tLQUwONF5pRKJdRqNURRlOpqylUqFcpOUFQqlVAoFOWWa/aroflgPXnv18LCAhYWFnjxxRelNgGApaUlRFHUuuUoCAIsLCzKLS+v7TWRSaNsLjlk0pRrcgGQTSZNLjkdJ40+ffpAFEWt/fCeydLSEr1794ZarZb2JYdMSqVS67whh0yadmhyaWacUSbTZ3oyW3m47jStX78eFy9exM6dOyusV1hYCGtra51yGxsbabs+ixcvxoIFC3TKjxw5Ii2g6OPjg9atW+PChQtaA9EDAgLQtGlTnD59Gunp6VJ5YGAgfH19cfz4ceTm/u/hmJ06dYKHhweOHDmi9UELCwuDra0tDh48qNWGiIgIFBQUIDo6WiqzsLBAnz59kJGRoXXL0dHREeHh4UhOTkZ8fLxU7u7ujs6dO+PGjRtaa23VZKbCwkKtZxZSJj4yKZVKREREyCqThYUF2rVrB1EUcerUKdlk6t27N27evIkrV67IJlOfPn2QlpYmq+Mkt3OEuWcy9DmpRq3T9CydPXsW7dq101lyICcnB02aNMF//vMfqWOTmJgIf39/nSUH+vbtiytXruDWrVta+y4oKIC9vT3ee+89LF68WOe99V1p8vb2RkZGhrSOQ0325EtLS3Ho0CH06NFDWgRSDv/aLywsxO+//y7lkkMmlUqF0tJSKZednZ0sMgGQckVEREjt4T1T2Vy9evWCQvG/kQw8Z9I4ePCg1nmD90yWlpYoLi7Gb7/9JuWSQyZRFFFUVCSdN6ysrChTNWXKycmBm5tb9azTZA6++OILlJSUYPjw4dJtuZSUFABAZmYmEhMTUbduXVhZWcHLywtRUVFgjGndoktNTQUA1K1bV+97WFtb671CpfkiL0upVEKpZ5G48u7bllde3grY+so1WZ5sj0Kh0DrJP628vLbXRKay5WVzySVT2f+XUyYNuWaqTFZzz1T2Vv6T23jNBEBqt6H
nQ14ylT0fat6LMpk+k6FPnzBoILimB1fZn+oc6JWUlITMzEw0b94c/v7+8Pf3R1BQEABg0aJF8Pf3x+XLlwE8vjxXUFCgdTkagHQpNzAwsNraSQghhBB5MKhXExwcrDOIOjMzExcuXIBSqYS3t7f0wN7k5GSo1Wo8//zzqFWrVrU0GgCmTZuGgQMHapWlpaXhP//5D8aOHYsBAwbA398fADBgwAC8/fbbWLNmDVavXg3g8SDyb775BvXq1UPnzp2rrZ3VSRAEOMpwVV/KxRfKxRfKxRc55uI5k1FjmlJSUtClSxcEBQVh0aJF8PHxkbYlJSVhzpw5iI2NxZ9//on69esb1bDVq1cjKysL9+7dw9q1azF48GC0bt0aADB16lQ4OzvrvKa8MU0AMGvWLCxduhQTJ05Eu3btsHv3bhw4cACbN2/GyJEjDWqTuT17jhBCCCFVZ/D3OzPC8OHDWYcOHSqs06FDBzZixAhjds8YY8zX15cB0PuTkJCg9zUJCQkMAFu6dKnONrVazRYtWsR8fX2ZlZUVa968Odu0aVOl2pSdnc0AsOzsbGMimZxarWaJiYlMrVbXdFNMinLxhXLxhXLxRY65zDGTod/vRi1uefToUXTr1q3COuHh4Th69Kgxuwfw+KoRY0zvj5+fn97X+Pn5gTGmc5UJeDwua86cOUhMTERxcTH++ecfvPLKK0a3zxyo1WrEx8fL7jEwlIsvlIsvlIsvcszFcyajOk1FRUXSzLPy3Lt3r9z1jwghhBBCeGNUp6lNmzb4+eefy31m24kTJ7Bt2za0a9euSo0jhBBCCDEXRq0J8Omnn6Jbt24ICgpCv3790LVrV3h4eCAtLQ0xMTHYv38/LCws8Mknn5i6vaQMQRDg7u7O5QyEilAuvlAuvlAuvsgxF8+ZjF4R/NixY5g4cSISEhIe70gQpNU2/f398d133z113BNvaPYcIYQQIj+Gfr8bvfpkt27dcPPmTfz55584f/48srOz4ezsjFatWqFr165c9iB5o1arcePGDTRu3Fjvyqi8olx8oVx8oVx8kWMunjNVacluQRAQFBQkrcRNni1RFHHt2jU0bNiQuw9eRSgXXygXXygXX+SYi+dMVX7OyeXLl3H16lXk5+fj1VdfNUWbCCGEEELMjlGz5wDgzJkzCAwMRMuWLTF06FCMHTtW2nb8+HHY2dlh7969pmgjIYQQQkiNM6rTdOnSJYSHhyMhIQFvv/02evfurbU9KCgIbm5u2LFjh0kaSfRTKBTw8fHR+2RonlEuvlAuvlAuvsgxF8+ZjJo999JLL+Hw4cM4d+4cGjVqhAULFmDhwoVaq3sOHz4c58+fx9WrV03a4JpEs+cIIYQQ+TH0+92obt4ff/yBIUOGoFGjRuXW8fHxeeqq4aRq1Go1zp07x+VS9BWhXHyhXHyhXHyRYy6eMxnVacrNzYWHh0eFdQoLC7n8C+GJKIpISkqCKIo13RSTolx8oVx8oVx8kWMunjMZ1Wny9vbGxYsXK6zz999/o2HDhkY1ihBCCCHE3BjVaerbty+OHDmCo0eP6t2+fft2nDx5EgMHDqxK2wghhBBCzIZR6zTNnTsXO3fuREREBMaMGYP79+8DANasWYO4uDhs3boVfn5+mDFjhkkbS7QpFAoEBARwOQOhIpSLL5SLL5SLL3LMxXMmo589d/v2bbz66quIi4vT2dahQwep4yQnNHuOEEIIkZ9qnT0HAA0aNEBsbCz+/vtvrFmzBp988gm++uornDp1CnFxcbLrMJkjlUqFEydOQKVS1XRTTIpy8YVy8YVy8UWOuXjOVOXHqAQGBiIwMNAETSGVxRhDeno6jLxYaLYoF18oF18oF1/kmIvnTPzdUCSEEEIIqQFGX2nKzc3Fjz/+iPPnz+PevXsoLS3VqSMIAo4dO1alBhJCCCGEmAOjOk1nzpxB7969kZmZWeHlNUEQjG4YeTqlUonAwEAolcqabopJUS6+UC6+UC6+yDEXz5mMmj3XuXNnnD59GosXL8bLL78MLy8vLsNXFs2eI4QQQuSnWmfPnTt3DiNGjMDMmTNRv379f0WHyRypVCpERkZyOQOhIpSLL5SLL5SLL3LMxXMmozpNrq6ucHd3N3VbSCUxxpCbm8vlDISKUC6+UC6+UC6+yDEXz5mM6jQNHDgQkZGRXD5sjxBCCCHEGEZ1mhYvXgxLS0u88soruHv3rqnbRAghhBBidox+jMrff/+N7t27Izs7G7Vq1dI7cEoQBNy6davKjTQX5jYQXBRFZGRkwM3Njctn+JSHcvGFcvGFcvFFjrnMMZOh3+9GdZqOHTuGfv36oaioCJaWlvDw8ICFhf7VCxISEiq7e7Nlbp0mQgghhFRdtc6emz17Nhhj2LZtG4qKipCcnIyEhAS9P6T6lJaW4sCBA3oXFuUZ5eIL5eIL5eKLHHPxnMmoxS0vX76MUaNGYejQoaZuD6kkHqdsGoJy8YVy8YVy8UWOuXjNZNSVJnd3d9ja2pq6LYQQQgghZsuoK02vvPIKduzYgcLCQuo8ESITjDEUFBRU+jXZ2dnIyspCXl4erKysKvV6Ozs7etwSIYQbRg0ELykpwfDhw/Ho0SMsWrQIrVq1goODQ3W0z6yY20BwzQJhjo6OsvrioVw1Iz8//5n/Hufl5cHe3v6ZvqehzP14GYty8UWOucwxk6Hf70ZdadJcXWKMITg4uNx6giBwe9+SF3K90ke5+KAA0MXFEZ7WlrhfXIrYrFzIaclbuR0vDcrFFznm4jWTUWOagoKCEBwcjJCQEAQHB5f7ExQUVOl95+XlYd68eejVqxdcXV0hCALWr1+vVUcURaxfvx79+/eHt7c37O3t0aJFC3zyyScoKirSu98ff/wRzZo1g42NDRo3boxVq1YZE92sqFQqHDx4UHYdU8pVM+zs7JCXl2f4z+8HkTW0Nw61aYZ1LRrhUJtmyBnWB3m/HzR4H3Z2djUdu1zmfryMRbn4IsdcPGcy6kpTdHS0iZvxPxkZGVi4cCF8fHzQqlUrve9VUFCA1157DR07dsQbb7wBDw8PxMXFYd68eTh27BgiIyO1Lvl9++23eOONNzBkyBDMmDEDMTExmDZtGgoKCjB79uxqy0IITwRBMPhWmSomCsVL5uv+q+thOrBkPqznLYFFUJjJ20gIITXJqE5TdfLy8kJqaio8PT1x9uxZtGvXTqeOlZUVYmNj0blzZ6lswoQJ8PPzkzpO3bt3BwAUFhbi/fffR58+fbBz506priiK+PjjjzFx4kTUqlXr2YQjRAaYWo2Sr5dXWKdkzQooOwdDUCqfUasIIaT6mcf65WVYW1vD09OzwjpWVlZaHSaNQYMGAQCuXLkilUVFReHhw4eYNGmSVt3JkycjPz8fBw4cMEGrCfn3EC/Gg2WkVViHpT+AeDH+2TSIEEKeEaOuNIWHhxtUTxAEHDt2zJi3MMr9+/cBAG5ublLZuXPnAABt27bVqtumTRsoFAqcO3cOo0aN0ru/4uJiFBcXS3/OyckB8Hg1U81KpgqFAkqlEmq1GqL4vyGwmnKVSoWyEx
SVSiUUCkW55U+ukKp5PM2T934tLCygVCrRo0cPMMak11laWkIURajVaqmuIAiwsLAot7y8ttdEJuDxBIOyueSQSbMfTS4A3GbCwwwYojT9AZhKxUUmfeWMMUREROjsh5fjVFF5r169tM4bvGeytLSEQqHQOm/IIZMoilrnDRXHv0/mnsnQ1cmrZUyTIAhgjD3zqYSff/45nJyc0Lt3b6ksNTUVSqUSHh4eWnWtrKxQu3Zt3Lt3r9z9LV68GAsWLNApP3LkiDSA1cfHB61bt8aFCxeQlJQk1QkICEDTpk1x+vRppKenS+WBgYHw9fXF8ePHkZubK5V36tQJHh4eOHLkiNYHLSwsDLa2tjh48KBWGyIiIlBQUKB1LCwsLNCnTx9kZGQgLi5OKnd0dER4eDiSk5MRHx8vlbu7u6Nz5864ceMGrl27JpXXZKbCwkJERUVRphrIdPjwYeTn50vlwcHBsLW1xeHDh7Uy9arnadCJ489/LqO4hCE0NBRJSUm4cOGCtM3NzQ0dO3ZEUlISrl+/Xm2ZqnqcgoKCUFhYiJMnT0rlNX2cqpopIiIC9+7d0zoevGfS/D7J6TiZ4zlCzpkMXaPOqHWaypOTk4O///4bc+fORf369bF161YoqzCmQTOmad26dRg7dmyFdRctWoT3338fa9aswZtvvimVjxs3Dlu3btX7F+Lj44MXXngBu3fv1rtPfVeavL29kZGRIa3jUJM9+dLSUhw6dAg9evSApaUlAHlcaSosLMTvv/8u5ZJDJpVKhdLSUimXnZ2d2WXKysoyaHyfAsDlLq1Q19oKCj3/MBIZw93iEjSPPf/U5Qeys7O1ph6bw3HS0ByvXr16aT2JvaaPU1X/tQ8ABw8e1Dpv8J7J0tISxcXF+O2336RccsgkiiKKioqk84aVlRVlqqZMOTk5cHNzq551msrj5OSE0NBQHD58GC1btsSnn36Kjz76yJRvode2bdvwwQcfYNy4cVodJuDxWhAlJSV6X1dUVFThWhHW1tawtrbWKdd8kZelVCr1dhA1Hx5Dy5/cb0Xlmit5T7ZHoVBoneSfVl5e22siU9nysrnkkqns//OS6UkigFnXk7CpZSOIjGl1nMT/npRmX08yaL0mpVKp933N4ThpKBSKcsvN6TgZWl72Vv6T23jNBEBqt6HnQ14ylT0fat6LMpk+k6Hnv2qZPefo6IjevXtj3bp11d5p+v333zF69Gj06dMH33zzjc52Ly8vqNVqpKWlad2iKykpwcOHD1G3bt1qbR8hvNCs02SwuONg33/9eJmB/1K4ewDjJ2NLp/IXvX3yPQkhhBfVtuSAQqFAampqde0eAHDq1CkMGjQIbdu2xfbt2/X2JAMDAwE8vtUXEREhlZ89exaiKErbeVVe75l3lOvZq8w6TQCA7r3Bwl5ESfxZXIiOxvOhobAKbCurZQbM+XhVBeXiixxz8ZrJpGOaNG7fvo2OHTvC1dUVV69eNXo/FY1punLlCoKCguDp6YmYmJhyx2IUFhaifv366Ny5M/bt2yeVv/rqq/jll1+QnJwMV1dXg9pjbs+eI4QQQkjVVeuz515//XW95SqVCnfv3sWff/6J0tJSLFy40JjdY/Xq1cjKypJmtu3btw8pKSkAgKlTp0KhUKBnz57IzMzEzJkzddZaatiwITp16gTg8Zimjz/+GJMnT8bQoUPRs2dPxMTEYNOmTfj0008N7jCZI1EUkZGRATc3N733gnlFufhCufhCufgix1xcZ2JGEAShwp+mTZuy77//3phdM8YY8/X1ZQD0/iQkJLCEhIRytwNgY8aM0dnnd999xwICApiVlRVr2LAhW7FiBRNFsVLtys7OZgBYdna20dlMqaSkhO3evZuVlJTUdFNMinLxhXLxhXLxRY65zDGTod/vRl1pSkhI0FuuUCjg4uICR0dHY3YrSUxMfGodVsm7ihMmTMCECROMbBEhhBBC/u2M6jT5+vqauh2EEEIIIWbNpMPXc3JycOrUKdjY2KBr167PfEXwfxtBEODo6Ci7v2fKxRfKxRfKxRc55uI5k1Gz577//nts2rQJu3fvlmatnT9/Hr1798aDBw8APF6ivOzjRuSAZs8RQggh8mPo97tRw9Z/+uknFBcXa03zf+edd5CWlobXXnsNERERiIuLw9q1a43ZPTGQKIq4c+eO1lLyckC5+EK5+EK5+CLHXDxnMqrTdP36dbRq1Ur688OHDxEVFYXx48fjhx9+wL59+9CuXTts3rzZZA0lutRqNeLj47We1yMHlIsvlIsvlIsvcszFcyajOk1ZWVlwd3eX/hwTEwMAGDx4sFTWtWtXg2bBEUIIIYTwwKhOU+3atbUekXLs2DEolUp06dJFKmOM6TwRmRBCCCGEV0Z1mp5//nns2bMH//zzD27evIktW7agS5cuWs+tSkxMhJeXl8kaSnQJggB3d3cuZyBUhHLxhXLxhXLxRY65eM5k1Oy5qKgodO/eXats9+7d6NevH4DHg7y8vLwQHh6OrVu3mqalZoBmzxFCCCHyU62z58LCwrB3714MGjQIgwYNwrZt26QOEwDExsaibt26WmOciOmp1WpcvXqVy8F0FaFcfKFcfKFcfJFjLp4zGf2kvD59+mDnzp3YuXMnXnrpJa1tQUFBOHfuHIYOHVrlBpLyiaKIa9eucTltsyKUiy+Uiy+Uiy9yzMVzJs4eL0wIIYQQUjOq9BiV06dP48yZM8jKytJ7mU0QBHz44YdVeQtCCCGEELNgVKfp0aNHGDhwIGJjY1HROHLqNFUvhUIBHx8fKBTyumBIufhCufhCufgix1w8ZzJq9tzYsWOxceNGhIaGYsyYMahfvz4sLPT3v0JCQqrcSHNBs+cIIYQQ+TH0+92oK0379+9H+/btcezYMS7XWZALtVqNCxcu4Pnnn4dSqazp5pgM5eIL5eIL5eKLHHPxnMmoa2OFhYUIDg6mDlMNE0URSUlJXM5AqAjl4gvl4gvl4oscc/GcyahOU2BgID1XjhBCCCH/KkZ1mubNm4e9e/fi5MmTpm4PIYQQQohZMmpM0/3799GnTx+EhITglVdewQsvvFDuwKnRo0dXqYGkfAqFAgEBAVzOQKgI5eIL5eIL5eKLHHPxnMmo2XMKhQKCIGgtN/Dk+CbGGARB4HKZ9PLQ7DlCCCFEfqp19ty6deuMbhgxHZVKhdOnT6N9+/blLvnAI8rFF8rFF8rFFznm4jmTUa0dM2aMqdtBjMAYQ3p6eoULjPKIcvGFcvGFcvFFjrl4zsTfDUVCCCGEkBpQpetiiYmJ2Lx5M+Lj45GTkwMnJycEBgbilVdegZ+fn4maSAghhBBS84waCA4AX375JWbNmgWVSqVzic3S0hKff/45pk+fbpJGmgtzGwguiiKSk5Ph7e3N5SyE8lAuvlAuvlAuvsgxlzlmMvT73ahO0/79+9G/f3+4ubnh7bffRlhYGLy8vHD//n1ERUVh+fLlePjwIfbu3Ys+ffpUKYg5MbdOEyGEEEKqztDvd6O6eMuXL4erqyv+/vtvzJkzBx07doSvry86dOiA9957D
3/99Rdq1aqF5cuXGx2APJ1KpUJkZCRUKlVNN8WkKBdfKBdfKBdf5JiL50xGdZr+/vtvDB8+HPXr19e73dvbG8OGDcNff/1VpcaRijHGkJuby+UMhIpQLr5QLr5QLr7IMRfPmYzqNJWUlMDe3r7COg4ODigpKTGqUYQQQggh5saoTlOTJk2wb9++ci+tqVQq7N+/H02aNKlS4wghhBBCzIVRnabRo0fj2rVr6Nmzp84tuLNnz6J37964du0aLYJZzZRKJTp16gSlUlnTTTEpysUXysUXysUXOebiOZNRs+fUajWGDBmCvXv3QhAE2NnZwcPDA2lpaSgoKABjDAMGDMCuXbvMZjqhKdDsOUIIIUR+qnX2nFKpxO7du7F+/XqEhobCysoKSUlJsLKyQlhYGDZs2IBff/3V6A5TXl4e5s2bh169esHV1RWCIGD9+vV66165cgW9evWCg4MDXF1d8eqrryI9PV2nniiK+Pzzz+Hv7w8bGxs8//zz2Lp1q1HtMxelpaU4cOAASktLa7opJkW5+EK5+EK5+CLHXDxnqtKK4KNHj8bo0aNN1RZJRkYGFi5cCB8fH7Rq1QrR0dF666WkpCA4OBjOzs5YtGgR8vLy8MUXX+DixYs4ffo0rKyspLrvv/8+lixZggkTJqBdu3bYs2cPRo4cCUEQMGLECJNneFZ4nLJpCMrFF8rFF8rFFznm4jWTWT5e2MvLC6mpqfD09MTZs2fRrl07vfUWLVqE/Px8/PXXX/Dx8QEAtG/fHj169MD69esxceJEAMDdu3exbNkyTJ48GatXrwYAjB8/HiEhIZg5cyaGDh3K5b1VQgghhDw7Rt0/279/PwYPHox79+7p3X7v3j0MHjwYhw4dMqpR1tbW8PT0fGq9Xbt2oW/fvlKHCQC6d++OJk2aYPv27VLZnj17UFpaikmTJkllgiDgzTffREpKCuLi4oxqJyGEEEL+PYy60vT111/j3r17qFu3rt7tdevWRUJCAr7++mv07t27Sg0sz927d5GWloa2bdvqbGvfvj0OHjwo/fncuXOwt7dHs2bNdOpptnft2lVnP8XFxSguLpb+nJOTA+Dx/VjNvViFQgGlUgm1Wg1RFKW6mvInn82nVCqhUCjKLX/yHq+FxeND9OSlTAsLCyiVSgQFBYExJr3O0tISoihCrVZLdQVBgIWFRbnl5bW9JjIBjxc+K5tLDpk0+9HkAiCLTMD/jldFbectkyZXWFiYzn54zqQpDwkJ0Tpv8J7J0tISCoVC67whh0yiKGqdN1QqFWWqpkyGjq8yqtN0/vx59O3bt8I6HTp0wP79+43ZvUFSU1MBPL6V9yQvLy88evQIxcXFsLa2RmpqKurUqQNBEHTqASj3itnixYuxYMECnfIjR47Azs4OAODj44PWrVvjwoULSEpKkuoEBASgadOmOH36tNbA9MDAQPj6+uL48ePIzc2Vyjt16gQPDw8cOXJE64MWFhYGW1tbrU4gAERERKCgoAAxMTFSmYWFBfr06YOMjAytq2eOjo4IDw9HcnIy4uPjpXJ3d3d07twZN27cwLVr16TymsxUWFiIqKgoysRhpoiICNllevHFF5GRkYGTJ0/KJpPm3PHHH3/IJpPm90lOx0mO5whzzlRQUABDGLXkgI2NDd5991188skn5db54IMP8MUXX6CoqKiyu9eiGdO0bt06jB07ViqPiYlBcHAwtm3bhmHDhmm95qOPPsLHH3+MzMxMuLi4oFu3bkhNTcXly5e16omiCKVSienTp2PlypU6763vSpO3tzcyMjKkKYk12ZMvLS3FoUOH0KNHD1haWgKQx5WmwsJC/P7771IuOWRSqVQoLS2VctnZ2ckiEwApV0REhNQe3jOVzdWrVy+tmcA8Z9I4ePCg1nmD90yWlpYoLi7Gb7/9JuWSQyZRFFFUVCSdN6ysrChTNWXKycmBm5vbU5ccMOpKk7u7u1bPT59r167B1dXVmN0bxNbWFgC0OjUamo6apo6tra1B9Z5kbW0Na2trnXLNF3lZSqVS72ByzYfH0PIn91tRuebK2ZPtUSgUepd7KK+8vLbXRKay5WVzySVT2f+XUyYNuWaqTFZzz1T2Vv6T23jNBEBqt6HnQ14ylT0fat6LMpk+U3kZdNpvUK0nBAcHY9++fbhw4YLe7efPn8fevXsREhJizO4Norm1prlNV1ZqaipcXV2lDo+Xlxfu37+v83BAzWvLG5tFCCGEEKJhVKdp9uzZAICuXbti4cKFiIuLQ1JSEuLi4rBgwQIEBQVBoVBgzpw5Jm1sWfXq1YO7uzvOnj2rs+306dMIDAyU/hwYGIiCggJcuXJFq96pU6ek7YQQQgghFWJG2rlzJ7O3t2cKhULrRxAE5ujoyH799Vdjd63lzJkzDABbt26dzrY33niD2drasqSkJKns6NGjDABbu3atVJacnMwsLS3Z5MmTpTJRFFlQUBCrV68eU6lUBrUlOzubAWDZ2dnGBzIhURRZSUkJE0WxpptiUpSLL5SLL5SLL3LMZY6ZDP1+N3pxyyFDhiAoKAjr16/HmTNnkJ2dDRcXF7Rv3x5jxoyBu7t7lTpzq1evRlZWljSzbd++fUhJSQEATJ06Fc7Ozpg7dy527NiBsLAwTJ8+HXl5eVi6dClatmyJ1157TdpX/fr18dZbb2Hp0qUoLS1Fu3btsHv3bsTExGDz5s1cL2xZWFgIR0fHmm6GyVEuvlAuvlAuvsgxF7eZnlEnrtJ8fX0ZAL0/CQkJUr1//vmHvfjii8zOzo65uLiwV155hd2/f19nf2q1mi1atIj5+voyKysr1rx5c7Zp06ZKtcncrjSVlJSw3bt3s5KSkppuiklRLr5QLr5QLr7IMZc5Zqr2K03VLTEx0aB6zZs3x+HDh59aTzPGqjrHWRFCCCFEvowaCE4IIYQQ8m9DnSbOlbf2BO8oF18oF18oF1/kmIvXTEatCP5vlZOTA2dn56euGEoIIYQQfhj6/U5XmjgmiiLS0tK0lpKXA8rFF8rFF8rFFznm4jkTdZo4plarERcXp/W8HjmgXHyhXHyhXHyRYy6eMxnVadKsnUQIIYQQ8m9hVKfJz88PAwYMwP79+7m8vEYIIYQQUllGdZo6duyIffv2YcCAAfDx8cFHH31k8LpKxHQEQYCjoyMEQajpppgU5eIL5eIL5eKLHHPxnMno2XPXr1/H999/j59++glpaWlQKBTo3r07JkyYgAEDBnA7nbAiNHuOEEIIkZ9qnz3XpEkTLF26FCkpKdixYwd69OiBo0ePYtiwYahXrx5mz56N69evG7t7YgBRFHHnzh3Z3SKlXHyhXHyhXHyRYy6eM1V59pyFhQWGDBmCQ4cOITExEfPmzYNCocAXX3yBZs2aISwsDNu3bwctB2V6arUa8fHxXM5AqAjl4gvl4gvl4oscc/GcyWRLDoiiiL/++gtnzpxBeno6GGPw9vZGbGwsXn75ZbRq1Qo3btww1dsRQgghhDxTVe403b59G3PnzoW3tzcGDx6MI0eOYMiQITh27BgSExORlJSEd999F1evXsWbb75p
ijYTQgghhDxzRo3WLi0txa5du/D999/jjz/+gCiK8Pf3x6JFi/Daa6/Bw8NDquvp6YnPPvsMOTk52Lhxo8kaTh7PQHB3d+dyBkJFKBdfKBdfKBdf5JiL50xGzZ5zd3fHo0ePoFQq0a9fP/znP//Biy++WOFrlixZgrlz53I58EuDZs8RQggh8lOts+fs7OywYMEC3LlzB7t27XpqhwkAJk2ahISEBGPejpRDrVbj6tWrXA6mqwjl4gvl4gvl4oscc/GcyahOU2JiIj744AN4eXkZ/BonJyf4+voa83akHKIo4tq1a1xfvdOHcvGFcvGFcvFFjrl4zmRUp4nH+5CEEEIIIVVh1EDw119//al1FAoFnJycEBAQgL59+6JevXrGvBUhhBBCiFkwqtO0fv166WqTvnHkgiBolU+dOhUfffQRPvjgAyObSfRRKBTw8fGBQmGy5bbMAuXiC+XiC+Xiixxz8ZzJqNlzCQkJeOutt3D69GlMnz4dXbp0QZ06dfDgwQPExsbiq6++Qvv27fH+++/j/Pnz+OSTT5CcnIwtW7Zg+PDh1ZHjmaDZc4QQQoj8VOvsuW3btuHUqVOIj4/He++9h6CgIDRp0gRBQUF477338Pfff+PkyZOIiorC+PHjERsbCwcHB6xZs8boQESXWq3GuXPnuJyBUBHKxRfKxRfKxRc55uI5k1Gdph9//BHDhg1DnTp19G739PTE0KFD8f333wMA6tWrh759++L8+fPGt5ToEEURSUlJXM5AqAjl4gvl4gvl4oscc/GcyahOU0pKCqytrSusY2Njg5SUFOnPPj4+KCoqMubtCCGEEEJqnFGdpnr16mH37t3ldoKKioqwe/durRlzaWlpqFWrlnGtJIQQQgipYUZ1msaNG4dbt26ha9eu2Lt3Lx4+fAgAePjwIfbu3YuuXbvi9u3bWksTxMTEoFWrVqZpNQHweAZCQEAAlzMQKkK5+EK5+EK5+CLHXDxnMmr2nFqtxmuvvYZNmzZJSw8oFArp/iRjDCNHjsTGjRuhUCjw4MEDLFmyBL169ULPnj1Nm+AZotlzhBBCiPxU6+w5pVKJjRs34ujRoxg9ejQCAwPh5+eHwMBAjBkzBr///js2bdok9SLr1KmDFStWcN1hMkcqlQonTpyASqWq6aaYFOXiC+XiC+Xiixxz8ZzJqMUtjx8/DicnJ4SHhyM8PNzUbSIGYowhPT1d7wKjPKNcfKFcfKFcfJFjLp4zGXWlKSwsDN99952p20IIIYQQYraM6jR5eHjAxsbG1G0hhBBCCDFbRnWaevTogejoaC4vrcmJUqlEYGAglEplTTfFpCgXXygXXygXX+SYi+dMRs2eu3fvHjp16oQXX3wRn332GVxdXaujbWaHZs8RQggh8lOts+dGjRoFFxcX/N///R/q1auH5557DmFhYdLAcM1Pt27djA5giBs3bmDEiBGoX78+7Ozs0LRpUyxcuBAFBQVa9U6cOIGuXbvCzs4Onp6emDZtGvLy8qq1bc+CSqVCZGQklzMQKkK5+EK5+EK5+CLHXDxnMmr2XHR0tPT/xcXFuHr1Kq5evapTT7OGU3VITk5G+/bt4ezsjClTpsDV1RVxcXGYN28e/vrrL+zZswcAEB8fj27duqFZs2ZYvnw5UlJS8MUXX+DGjRs4dOhQtbXvWWCMITc3V3a3SSkXXygXXygXX+SYi+dMRnWazOEhez/99BOysrLw559/onnz5gCAiRMnQhRFbNy4EZmZmahVqxbmzp2LWrVqITo6Wrrk5ufnhwkTJuDIkSN48cUXazIGIYQQQjjB3xrm/5WTkwPg8cKZZXl5eUGhUMDKygo5OTn4/fffMWrUKK17lKNHj4aDgwO2b9/+TNtMCCGEEH4ZdaWprLy8PFy/fh35+fkICgoyRZsMEhoais8++wzjxo3DggULULt2bZw4cQJr167FtGnTYG9vj9jYWKhUKrRt21brtVZWVggMDMS5c+cqfI/i4mIUFxdLf9Z01EpLS1FaWgrg8eNjlEol1Gq11hU4TblKpdK6BKlUKqFQKMot1+xXw8Li8SF68t6vhYUFFAoF2rVrB1EUpddZWlpCFEWo1WqpriAIsLCwKLe8vLbXRCbg8ZXMsrnkkEmlUmnl0uTkPZMmR7t27aBUKmWTSZOrU6dOEARBaz88Z9Lsp2PHjlrnDd4zWVpaQhAErfOGHDKJoqh13lCpVJSpmjI9ma08RneaEhMTMX36dBw8eBCiKEIQBOkvKDY2FhMmTMCaNWsQGhpq7FtUqFevXvj444+xaNEi7N27Vyp///338cknnwAAUlNTATy++vQkLy8vxMTEVPgeixcvxoIFC3TKjxw5Ajs7OwCAj48PWrdujQsXLiApKUmqExAQgKZNm+L06dNIT0+XygMDA+Hr64vjx48jNzdXKu/UqRM8PDxw5MgRrQ9aWFgYbG1tcfDgQa02REREoLCwEGfOnJHKLCws0KdPH2RkZCAuLk4qd3R0RHh4OJKTkxEfHy+Vu7u7o3Pnzrhx4wauXbsmldd0pqioKMrEaaa0tDTKxEEmQRDw22+/ySrTo0ePtM6Hcsgkx3OEuWZ6cgJZeYxaciApKQnt27fHw4cPMWDAANy/fx9xcXFSL1GlUqFu3boYNGgQvv3228ru3mCbNm3Cpk2bMGTIENSuXRsHDhzAunXr8NVXX2HKlCn46aefMHr0aJw6dQrt27fXeu3o0aOxd+9eZGVllbt/fVeavL29kZGRId3uq8mefGlpKY4cOYLw8HBYWloCkMeVpsLCQkRGRkq55JBJpVKhtLRUymVnZyeLTACkXD179pTaw3smTa6oqCh0795d62nsPGfSOHz4sNZ5g/dMlpaWKC4uxu+//y7lkkMmURRRVFQknTesrKwoUzVlysnJgZub21OXHDDqStO8efOQmZmJP/74A507d8aCBQu0eo4WFhYICgpCbGysMbs3yM8//4yJEyfi+vXrqF+/PgBg8ODBEEURs2fPxssvvwxbW1sA0Or4aBQVFUnby2NtbQ1ra2udcs0XeVlKpVLvQl2aD4+h5U/ut6JyQRCgVqt12qNQKLRO8k8rL6/tNZFJU/5kLjlkAiDlqqjtvGUCIJ3s5JQJeHzSVygUeuvzmqm0tFTveQPgNxPwuO2VOR/ykqns+VDzXpTJ9JnKy6DTfoNqPeHw4cMYNGgQOnfuXG4dX19f3L1715jdG2TNmjVo3bq11GHS6N+/PwoKCnDu3DnptpzmNl1ZqampqFu3brW1jxBCCCHyYlSn6dGjR/Dz86uwDmNM7xUeU3nw4IHWpTwNzaU2lUqFFi1awMLCAmfPntWqU1JSgvj4eAQGBlZb+wghhBAiL0Z1murUqYMbN25UWOfixYvw8fExqlGGaNKkCc6dO4fr169rlW/duhUKhQLPP/88nJ2d0b17d2zatElrANhPP/2EvLw8DB06tNra9yxYWFggLCys3MuOvKJcfKFcfKFcfJFjLp4zGf3A3v379+PChQt6t8fExCAyMhIRERFValxFZs6cCbVajaCgIHz88cdYs2YNIiI
isHv3brz++uvSrbdPP/0Ujx49QkhICL755ht88MEHmDJlCl588UX06tWr2tr3rDxtXBavKBdfKBdfKBdf5JiL10xGdZo++OAD2NraIjg4GJ9++ilu3rwJADh06BA+/PBD9OrVC25ubpg5c6ZJG1tWcHAwTpw4gTZt2mDNmjV46623cOvWLXz66adYu3atVO+FF17A0aNHYWtri7fffhvfffcdxo0bh507d1Zb254VlUqFgwcPcvn8nopQLr5QLr5QLr7IMRfPmYy6Nubn54fDhw9jxIgR+PDDDyEIAhhj6Nu3Lxhj8PHxwc6dO/Wuj2RK7du311kfQp+uXbtW60w+QgghhMif0TcUO3TogBs3bmDfvn04deoUHj16BCcnJ3To0AEDBgyAlZWVKdtJCCGEEFKjqjQKy8LCAoMGDcKgQYNM1R5CCCGEELNk1Irg/1Y5OTlwdnZ+6oqhzwpjTHpujyAINd0ck6FcfKFcfKFcfJFjLnPMZOj3u9FXmkpKSrB7926cOXMGWVlZetdMEgQBP/74o7FvQQxQWFgIR0fHmm6GyVEuvlAuvlAuvsgxF6+ZjOo03blzBz169MCtW7dQ0YUq6jRVL5VKhaioKERERBi8BDwPKBdfKBdfKBdf5JiL50xGdZrefvtt3Lx5E6+++ipef/111K9fn8tFqgghhBBCDGVUTycyMhLdunXDhg0bTN0eQgghhBCzZNTilqIoonXr1qZuCzGCXK/wUS6+UC6+UC6+yDEXr5mMmj3Xo0cP2NjYYN++fdXRJrNlbrPnCCGEEFJ1hn6/G3WlacmSJYiMjJTFo0h4Jooi0tLSIIpiTTfFpCgXXygXXygXX+SYi+dMRl0fO3DgAMLCwjB8+HCEhITghRde0NszEwQBH374YZUbSfRTq9WIi4tDREQEFAqj+r9miXLxhXLxRY65mFqN0vizuB15DM7h3WAV2BaCUlnTzTIJOR4vnjMZ1WmaP3++9P/R0dGIjo7WW486TYQQQqqTKiYKJV8vB8tIQ0sAqsN7oHbzgNXkGbAICqvp5hGZMarTFBUVZep2EEIIIZWiiolC8YL3dMpZRtrj8nlLqONETMqoTlNISIip20GMIAgCHB0dzWYZelOhXHyhXHwx51yiKCIjI8PAymrYrloKAYC+JAxA4eovUNi4KaCo+Fadm5ub2d4mMufjZSyeM9Gz5yqBZs8RQkj1SUtLQ506dQyqG+TiiENtmj21Xu+/riAmK7fCOg8ePICHh4dB70vkqVpnzwGPl0FfsWIF2rdvDycnJ601F+Lj4zFp0iRcv37d2N0TA4iiiDt37nA5A6EilIsvlIsvcsnlaW3Y4zcMrWeu5HK8yuI5k1G35woLC/Hiiy/ixIkTcHNzg5OTE/Lz86Xt/v7+WLduHVxdXfHJJ5+YrLFEm1qtRnx8POrWrWu2l5aNQbn4Qrn4Ys653Nzc8ODBA4PqKi5fBD6Z+9R6a37ejtXPtXzq+5orcz5exuI5k1GdpkWLFiE2NhZLlizBzJkzsWDBAnz88cfSdmdnZ4SEhODw4cPUaSKEEGIQhUJh8G0yVjsUhW4eYBlp5dYR3OugdlCobJYfIDXPqC7etm3bEBYWhlmzZkEQBL2DuRo0aICkpKQqN5AQQgh5kqBUwmryjArrWE16mzpMxKSM6jQlJSWhbdu2FdZxdHREdna2UY0ihhEEAe7u7lzOQKgI5eIL5eKLnHJZBIXBet4SCG7aV6cE9zqwlslyA3I6Xho8ZzLq9pyjoyPS0sq/JAoAt27dgru7u1GNIoaxsLBA586da7oZJke5+EK5+CK3XBZBYVB2DoZ4MR7sUQYEVzcoWgbK5gqT3I4XwHcmo640dezYEfv27UNWVpbe7cnJyTh48CCCg4Or0jbyFGq1GlevXoVara7pppgU5eIL5eKLHHMJSiXQMhA36/oCMuowAfI8XjxnMqrTNHPmTGRmZqJbt26IjY2FSqUCABQUFODYsWPo2bMnVCoVZsyo+H4zqRpRFHHt2jUup21WhHLxhXLxhXLxRY65eM5k1O254OBgrF69GtOnT9e6muTo6AgAUCqVWLNmDdq0aWOaVhJCCCGE1DCjOk0A8OabbyI0NBTffPMNTp06hUePHsHJyQkdOnTApEmT0Lx5c1O2kxBCCCGkRhndaQKAZs2a4csvvzRVW0glKRQK+Pj4cLc42NNQLr5QLr5QLr7IMRfPmejZc5VAz54jhBBC5Kfanz1Hap5arca5c+e4nIFQEcrFF8rFF8rFFznm4jkTdZo4JooikpKSuJyBUBHKxRfKxRfKxRc55uI5E3WaCCGEEEIMUKWB4P82muFfOTk5NdySx0pLS1FQUICcnBxYWlrWdHNMhnLxhXLxhXLxRY65zDGT5nv9acO8qdNUCbm5uQAAb2/vGm4JIYQQQkwtNzcXzs7O5W6n2XOVIIoi7t27B0dHR7N40GBOTg68vb2RnJwsq9l8lIsvlIsvlIsvcsxljpkYY8jNzUXdunUrXAqBrjRVgkKhQP369Wu6GTqcnJzM5oNnSpSLL5SLL5SLL3LMZW6ZKrrCpEEDwQkhhBBCDECdJkIIIYQQA1CniWPW1taYN28erK2ta7opJkW5+EK5+EK5+CLHXDxnooHghBBCCCEGoCtNhBBCCCEGoE4TIYQQQogBqNNECCGEEGIA6jQRQgghhBiAOk2EEELMlp+fH8aOHVvTzSAEAHWauHDr1i385z//QYMGDWBjYwMnJyd06dIFX375JWbPng1BEJ76ExoaWtMxdPwbcxUWFkr11Go11q1bh9DQULi6usLa2hp+fn547bXXcPbs2Sq3Y/369RX+3Z08ebLK71GWKIpYv349+vfvD29vb9jb26NFixb45JNPUFRUVO7rrly5AkEQYGNjg6ysLL11QkND0aJFC5O2FwASEhIwZcoUNGnSBHZ2drCzs8Nzzz2HyZMn48KFC3pfM2vWLAiCgOHDh5u8PabCS66KPqPvvffeM2tHdTP0d7FsmUKhQN26dfHiiy8iOjpaa39+fn4QBAHdu3fX+37ff/+9tB9TnEv+bbkqQo9RMXMHDhzA0KFDYW1tjdGjR6NFixYoKSnBn3/+iZkzZ6Jr16746aefpPp5eXl48803MWjQIAwePFgqr1OnTk00v1z/1lyXLl3Cd999h8LCQgwePBi//fYbgoODMXfuXLi6uiIxMRHbt2/Hhg0bkJSUZJLH9ixcuBD+/v465Y0aNaryvssqKCjAa6+9ho4dO+KNN96Ah4cH4uLiMG/ePBw7dgyRkZF6n9m4adMmeHp6IjMzEzt37sT48eNN2q7y7N+/H8OHD4eFhQVeeeUVtGrVCgqFAlevXsUvv/yCtWvXIiEhAb6+vtJrGGPYunUr/Pz8sG/fPuTm5sLR0fGZtNdQPObS9xmtjk5yTTPkd7FHjx4YPXo0GGNISEjAmjVrEB4ejgMHDqB3795SPRsbG0RFReH+/fvw9PTU2t/mzZthY2NT4T9WTEmuufRixGzdvn2bOTg4sKZNm7J79+7pbL9x4wZbuXKlVll6ejoDwObNm/
eMWll5lIuxyZMnMwBsxYoVOvVUKhVbunQpS05OrlJ71q1bxwCwM2fOVGk/hiouLmaxsbE65QsWLGAA2O+//66zTRRF5ufnx2bMmMEGDRrEQkND9e47JCSENW/e3GRtvXnzJrO3t2fNmjXTe6xKS0vZl19+yZKSkrTKIyMjGQAWGRnJLC0t2fr1603WJlPgLZchn1FfX182ZsyYZ9Ke6mLo7yIANnnyZK2yCxcuMADsxRdflMp8fX1Zt27dmJOTk865Mjk5mSkUCjZkyJBq//2Xa66K0O05M/b5558jLy8PP/74I7y8vHS2N2rUCNOnT6+BllXNvz1XSkoKvv32W/To0QNvvfWWTj2lUol33333mT0cWhRFfPnll2jZsiVsbGzg7u6OXr16aV3+VqlU+Pjjj9GwYUPpNuLcuXNRXFws1bGyskLnzp119j9o0CAAj2/DPSk2NhaJiYkYMWIERowYgePHjyMlJaUaUmr7/PPPkZ+fj3Xr1uk9VhYWFpg2bRq8vb21yjdv3oznnnsOYWFh6N69OzZv3lztba0MueZ60u3btzF06FC4urrCzs4OHTt2xIEDB7TqREdHQxAEbNu2DXPnzoWnpyfs7e3Rv39/JCcna9W9ceMGhgwZAk9PT9jY2KB+/foYMWIEsrOzn2UsvVq2bAk3NzckJCRoldvY2GDw4MHYsmWLVvnWrVtRq1Yt9OzZ81k2s9J4zUW358zYvn370KBBA71fRDz7t+c6dOgQVCoVXn311WfSruzsbGRkZGiVCYKA2rVrAwDGjRuH9evXo3fv3hg/fjxUKhViYmJw8uRJtG3bFgAwfvx4bNiwAS+99BLeeecdnDp1CosXL8aVK1fw66+/Vvj+9+/fBwC4ubnpbNu8eTMaNmyIdu3aoUWLFrCzs8PWrVsxc+ZMU0Qv1/79+9GoUSN06NDB4NcUFxdj165deOeddwAAL7/8Ml577TW9txFqCq+59H1G9X1eAODBgwfo3LkzCgoKMG3aNNSuXRsbNmxA//79sXPnTqmTrvHpp59CEATMnj0baWlpWLlyJbp37474+HjY2tqipKQEPXv2RHFxMaZOnQpPT0/cvXsX+/fvR1ZWlkFPvq9KzrK/i/pkZmYiMzNT7+30kSNH4sUXX8StW7fQsGFDAMCWLVvw0ksvwdLS0mTtfhq55tKrRq5vkafKzs5mANiAAQMq9Tpzv41FuRh7++23GQB27ty5am2T5tK5vh9ra2vG2P9uy0ybNk3n9aIoMsYYi4+PZwDY+PHjtba/++670i2dinTv3p05OTmxzMxMrfKSkhJWu3Zt9v7770tlI0eOZK1atdLZhylvz2mO1cCBA3W2ZWZmsvT0dOmnoKBA2rZz504GgN24cYMxxlhOTg6zsbHRe4u1JvCYq6LPqMaTt+feeustBoDFxMRIZbm5uczf35/5+fkxtVrNGGMsKiqKAWD16tVjOTk5Ut3t27czAOzLL79kjDF27tw5BoDt2LGjRnJqfhcZe3wba9y4cSw9PZ2lpaWxU6dOsW7dujEAbNmyZVI9X19f1qdPH6ZSqZinpyf7+OOPGWOMXb58mQFgf/zxxzO5PS/XXBWh23NmKicnBwDMbpBpVVGuZ/938PXXX+P333/X+jl06BAAYNeuXRAEAfPmzdN5nWbQ9sGDBwEAM2bM0NquuTLx5G2RshYtWoSjR49iyZIlcHFx0dp26NAhPHz4EC+//LJU9vLLL+P8+fO4dOlS5YMaSPP37+DgoLMtNDQU7u7u0s/XX38tbdu8eTPatm0r/cvY0dERffr0MZtbWTzn0vcZLc/BgwfRvn17dO3aVSpzcHDAxIkTkZiYiMuXL2vVHz16tNbv2ksvvQQvLy/pc625knT48GEUFBSYMpaOin4XNX788Ue4u7vDw8MDHTp0QGxsLGbMmFHurfxhw4Zh69atAB4fS29vbwQFBVVrjifJNZc+dHvOTDk5OQEAcnNza7glpkW5nv3fQfv27aXbbE+6desW6tatC1dX13Jff+fOHSgUCp3L6J6ennBxccGdO3f0vm7btm344IMPMG7cOLz55ps62zdt2gR/f39YW1vj5s2bAICGDRvCzs4OmzdvxqJFiwyNWCmaL9C8vDydbd9++y1yc3Px4MEDjBo1SirPysrCwYMHMWXKFKmtANClSxfs2rUL169fR5MmTaqlvYbiOVdFn9En3blzR+/tx2bNmknby868a9y4sVY9QRDQqFEjJCYmAgD8/f0xY8YMLF++HJs3b0ZQUBD69++PUaNGmfTWHGBYzgEDBmDKlCkQBAGOjo5o3rw57O3ty60/cuRIfPXVVzh//jy2bNmCESNG6J2lWp3kmksf6jSZKScnJ9StWxf//PNPTTfFpCgX0LRpUwDAxYsXERgYWM0tM53KnLB+//13jB49Gn369ME333yjsz0nJwf79u1DUVGRzpca8Hj8gmYsiqk5OzvDy8tL77HSfBlrvlA1duzYgeLiYixbtgzLli3Ted3mzZuxYMECk7e1MuSa61lYtmwZxo4diz179uDIkSOYNm0aFi9ejJMnTz6zCRka9evXL3edIn06dOiAhg0b4q233kJCQgJGjhxZja0znlxy0e05M9a3b1/cunULcXFxNd0Uk/q35+rduzeUSiU2bdr0jFpWvoYNG+LevXt49OhRuXV8fX0hiiJu3LihVf7gwQNkZWVprfcDAKdOncKgQYPQtm1bbN++HRYWuv82++WXX1BUVIS1a9dix44dWj+ffPIJ7ty5g9jYWNOE1KNPnz64efMmTp8+bVD9zZs3o0WLFjpt3bFjB7p3764z06emyDVXWb6+vrh27ZpO+dWrV6XtZT35uWWM4ebNm/Dz89Mqb9myJT744AMcP34cMTExuHv3rt4Ovzl6+eWXER0djWbNmnH1D7GnMcdc1GkyY7NmzYK9vT3Gjx+PBw8e6Gy/desWvvzyyxpoWdX823N5e3tjwoQJOHLkCFatWqVTTxRFLFu27JlMvR8yZAgYY3qvJjwevwlEREQAAFauXKm1ffny5QAef1FrXLlyBX369IGfnx/2798PW1tbve+7adMmNGjQAG+88QZeeuklrZ93330XDg4O1TqmZtasWbCzs8Prr7+u91hpsgNAcnIyjh8/jmHDhum09aWXXsJrr72Gmzdv4tSpU9XWXkPJNVdZEREROH36tNY/TvLz8/Hdd9/Bz88Pzz33nFb9jRs3at0K37lzJ1JTU6UFFXNycqBSqbRe07JlSygUCq0lNczZ+PHjMW/ePL1XC3lmjrno9pwZa9iwIbZs2YLhw4ejWbNmWitMnzhxAjt27ODymUyU6/HtgFu3bmHatGn45Zdf0LdvX9SqVQtJSUnYsWMHrl69ihEjRpikXYcOHZL+FV5W586dERYWhldffRVfffUVbty4gV69ekEURcTExCAsLAxTpkxBq1atMGbMGHz33XfIyspCSEgITp8+jQ0bNmDgwIEICwsD8HiMVs+ePZGZmYmZM2fqDBBv2LAhOnXqhHv37iEqKgrTpk3T215ra2v07NkTO3bswFdff
VUtU4wbN26MLVu24OWXX0ZAQIC0cjb772rFW7ZsgUKhQP369bFlyxYwxtC/f3+9+4qIiICFhQU2b95cqan+1UGuucp67733sHXrVvTu3RvTpk2Dq6srNmzYgISEBOzatQsKhfa1AFdXV3Tt2hWvvfYaHjx4gJUrV6JRo0aYMGECACAyMhJTpkzB0KFD0aRJE6hUKvz0009QKpUYMmSISdte0e9igwYNjN6vr68v5s+fX4WWVY1cc+lVI3P2SKVcv36dTZgwgfn5+TErKyvm6OjIunTpwlatWsWKioq06pr71Pyy/u25VCoV++GHH1hQUBBzdnZmlpaWzNfXl7322msmWY6gounAANi6deukdixdupQ1bdqUWVlZMXd3d9a7d2/2119/SfsqLS1lCxYsYP7+/szS0pJ5e3uzOXPmaOVJSEio8P0008aXLVvGALBjx46V2/b169czAGzPnj2MMdOvCK5x8+ZN9uabb7JGjRoxGxsbZmtry5o2bcreeOMNFh8fzxhjrGXLlszHx6fC/YSGhjIPDw9WWlpq8jYag5dcxq4IfuvWLfbSSy8xFxcXZmNjw9q3b8/279+vVUez5MDWrVvZnDlzmIeHB7O1tWV9+vRhd+7ckerdvn2bvf7666xhw4bMxsaGubq6srCwMHb06FGT53za7yL0rJytj2ZqviHvWVNLDvCcqyICY2Wu1xJCCCEyEB0djbCwMOzYsQMvvfRSTTeHyASNaSKEEEIIMQB1mgghhBBCDECdJkIIIYQQA9CYJkIIIYQQA9CVJkIIIYQQA1Cn6V8sOjoagiAgOjq6pptCSI2aP3++0Y9s+emnn9C0aVNYWlrqPJSYEENU5fNHni3qNBFCiJGuXr2KsWPHomHDhvj+++/x3Xff1XSTKm3NmjVYv359TTeDyMSJEycwf/58ZGVlGfV6c/880orghBBipOjoaIiiiC+//BKNGjWq6eYYZc2aNXBzc+NyFX5ifk6cOIEFCxZg7NixRl15NffPI11p4pAoiigqKqrpZhDyr5eWlgYAdFuOkH8J6jTVIM197KtXr2LYsGFwcnJC7dq1MX36dK1OkSAImDJlCjZv3ozmzZvD2toav/32GwDg7t27eP3111GnTh1YW1ujefPm+L//+z+d90pJScHAgQNhb28PDw8PvP322yZ7GKUmx/Xr1zFq1Cg4OzvD3d0dH374IRhjSE5OxoABA+Dk5ARPT0+dhy8WFxdj3rx5aNSoEaytreHt7Y1Zs2bptG/dunUIDw+Hh4cHrK2t8dxzz2Ht2rU67Tl79ix69uwJNzc32Nrawt/fH6+//rpJshL+/fnnn2jXrh1sbGzQsGFDfPvtt3rrbdq0CW3atIGtrS1cXV0xYsQIJCcnS9v9/Pwwb948AIC7uzsEQaj252TduXMHkyZNQkBAAGxtbVG7dm0MHToUiYmJWvXKGyOzfv16CIIg1ffz88OlS5fwxx9/QBAECIKA0NBQqf7t27cxdOhQuLq6ws7ODh07dtR5piCpnKd9/kJCQtCqVSu9rw0ICEDPnj0BAImJiRAEAV988QW+++47NGzYENbW1mjXrh3OnDlT7Tn0mT9/PmbOnAkA8Pf3lz5TiYmJUKlU+Pjjj6V2+vn5Ye7cuVrn+ad9Hs0B3Z4zA8OGDYOfnx8WL16MkydP4quvvkJmZiY2btwo1YmMjMT27dsxZcoUuLm5wc/PDw8ePEDHjh2lTpW7uzsOHTqEcePGIScnB2+99RYAoLCwEN26dUNSUhKmTZuGunXr4qeffkJkZKRJc2geVLtkyRIcOHAAn3zyCVxdXfHtt98iPDwcn332GTZv3ox3330X7dq1Q3BwMERRRP/+/fHnn39i4sSJaNasGS5evIgVK1bg+vXr2L17t7T/tWvXonnz5ujfvz8sLCywb98+TJo0CaIoYvLkyQAe/8v/xRdfhLu7O9577z24uLggMTERv/zyi0mzEj5dvHhR+nzMnz8fKpUK8+bNQ506dbTqffrpp/jwww8xbNgwjB8/Hunp6Vi1ahWCg4Nx7tw5uLi4YOXKldi4cSN+/fVXrF27Fg4ODnj++eertf1nzpzBiRMnMGLECNSvXx+JiYlYu3YtQkNDcfnyZdjZ2VVqfytXrsTUqVPh4OCA999/HwCkv4sHDx6gc+fOKCgowLRp01C7dm1s2LAB/fv3x86dOzFo0CCT55M7Qz5/r776KiZMmIB//vkHLVq0kMrPnDmD69ev44MPPtDa55YtW5Cbm4v//Oc/EAQBn3/+OQYPHozbt29Xy8OuKzJ48GBcv34dW7duxYoVK+Dm5gbg8T8qxo8fjw0bNuCll17CO++8g1OnTmHx4sW4cuUKfv31VwAVfx7NRo088Y4wxhibN28eA8D69++vVT5p0iQGgJ0/f54x9vhhhwqFgl26dEmr3rhx45iXlxfLyMjQKh8xYgRzdnZmBQUFjDHGVq5cyQCw7du3S3Xy8/NZo0aNGAAWFRVlkhwTJ06UylQqFatfvz4TBIEtWbJEKs/MzGS2trbSAzh/+uknplAoWExMjNY+v/nmGwaAxcbGSmWaPGX17NmTNWjQQPrzr7/+WqMPcyTmbeDAgczGxkbrga2XL19mSqWSaU6HiYmJTKlUsk8//VTrtRcvXmQWFhZa5ZrPfnp6+jNpv77fgbi4OAaAbdy4UaddT9I87DQhIUEqa968OQsJCdGp+9ZbbzEAWr+bubm5zN/fn/n5+TG1Wl21MP9Chnz+srKymI2NDZs9e7bWa6dNm8bs7e1ZXl4eY+x/D8iuXbs2e/TokVRvz549DADbt2/fM0ika+nSpTqfsfj4eAaAjR8/Xqvuu+++ywCwyMhIqay8z6O5oNtzZkBzlURj6tSpAICDBw9KZSEhIXjuueekPzPGsGvXLvTr1w+MMWRkZEg/PXv2RHZ2Nv7++29pP15eXloPrbSzs8PEiRNNmmP8+PHS/yuVSrRt2xaMMYwbN04qd3FxQUBAAG7fvg0A2LFjB5o1a4amTZtqZQgPDwcAREVFSa+1tbWV/j87OxsZGRkICQnB7du3kZ2dLe0fAPbv34/S0lKT5iN8U6vVOHz4MAYOHAgfHx+pvFmzZtItDwD45ZdfIIoihg0bpvWZ9PT0ROPGjbU+k89a2d+B0tJSPHz4EI0aNYKLi4v0+24qBw8eRPv27dG1a1epzMHBARMnTkRiYiIuX75s0veTO0M/f87OzhgwYAC2bt0K9t+1p9VqNbZt2yYNsShr+PDhqFWrlvTnoKAgAJDOseZA8102Y8YMrfJ33nkHALi65UudJjPQuHFjrT83bNgQCoVCa5yCv7+/Vp309HRkZWXhu+++g7u7u9bPa6+9BuB/g1Tv3LmDRo0a6YxxCAgIMGmOsicC4PEvv42NjXSJtmx5ZmYmAODGjRu4dOmSToYmTZpoZQCA2NhYdO/eHfb29nBxcYG7uzvmzp0LAFKnKSQkBEOGDMGCBQvg5uaGAQMGYN26dSYbv0X4lZ6ejsLCQp3fN0D7d+HGjRtgjKFx48Y6n8srV65ofSaftcLCQnz00Ufw
9vaGtbU13Nzc4O7ujqysLOl3wFTu3Lmj9xzRrFkzaTsxnKGfPwAYPXo0kpKSEBMTAwA4evQoHjx4gFdffVXntU+edzUdKM051hzcuXMHCoVCZ4app6cnXFxcuPos0ZgmM6RvAGfZf2ECj2fQAcCoUaMwZswYvfup7vEVT1IqlQaVAZD+BSWKIlq2bInly5frreft7Q0AuHXrFrp164amTZti+fLl8Pb2hpWVFQ4ePIgVK1ZIfx+CIGDnzp04efIk9u3bh8OHD+P111/HsmXLcPLkSTg4OJgiKpExURTx/+3de1BU1x0H8O9lebqsLK9VSXTBrSYVAwaC4IAghJECohgIIdtYDBlTI0FNTWxeM0irURJjYmk0oxMeahO1kvjqqK2K4AO1YBprBAryMEVFIiAERRB+/YPZG6674F00QuPvM+OMnHP23N997PLbe885CIKAffv2mbx+B/MaSk1NRXZ2NhYvXowpU6bAwcEBgiAgMTFRfA8Apj9DgJ47Fmzoi4iIwIgRI7BlyxYEBwdjy5YtGDlyJMLDw43a3u0zdij5OSzgyUnTEFBRUSG5k1RZWYnu7m64u7v3+RpXV1eoVCp0dXWZfCP1ptVqce7cORCR5KItLy+/59jvlU6nwzfffIOnn3663zfUnj17cOvWLezevVvyzaqvRyUBAQEICAjAihUr8Pnnn+PXv/41tm7dKnmEyB4urq6usLOzQ0VFhVFd7/eCTqcDEcHDw0O84zlU7NixA0lJSZIZqO3t7UYLCRruNjQ3N0uWQzD1jb6v951WqzX5GVFWVibWM/nkXn9ATyKk1+uRk5ODjIwM7Ny5E/PmzeszQRpKTF1PWq0W3d3dqKioEO9UAj2TDZqbmyXX0lBPrPjx3BDwySefSH7OzMwEAERGRvb5GoVCgbi4OOTl5eHcuXNG9Q0NDeL/o6KicOnSJezYsUMsu3HjxpBYvTghIQF1dXXYuHGjUd3NmzfR1tYG4MdvU72/PV2/fh3Z2dmS1zQ1NRl9w5o0aRIA8CO6h5xCoUBERAR27tyJixcviuWlpaU4cOCA+PMzzzwDhUKB9PR0o2uJiHDt2rUHFvOdFAqFUUyZmZlGd5B0Oh0AoLCwUCxra2tDbm6uUZ9KpdLk6s1RUVE4ffo0ioqKJH1s2LAB7u7ukjGW7O7kXn8Gc+bMQVNTE37729/ihx9+wAsvvPAgwx0ww5ir3tdUVFQUgJ7Zcb0ZnjBER0dLXj/Q1cQfBL7TNARUV1dj5syZ+NWvfoWioiJs2bIFer2+z7U6DFatWoX8/Hz4+/tj3rx5mDBhAhobG3HmzBkcPHgQjY2NAIB58+bhz3/+M37zm9+gpKQEo0aNwubNm82envxTmDNnDrZv34758+cjPz8fgYGB6OrqQllZGbZv344DBw7gqaeewvTp02FtbY2YmBjxQ2Tjxo3QaDS4fPmy2F9ubi7WrVuH2bNnQ6fTobW1FRs3bsTw4cPFNy57eKWnp2P//v2YOnUqFixYgNu3byMzMxOenp44e/YsgJ6EY/ny5XjrrbdQU1OD2NhYqFQqVFdX46uvvsLLL7+M119/fVDinzFjBjZv3gwHBwdMmDABRUVFOHjwIJydnSXtpk+fjjFjxuCll17CG2+8AYVCgaysLLi6ukp+YQOAr68v1q9fj+XLl+MXv/gFNBoNwsLC8Oabb+KLL75AZGQkFi5cCCcnJ+Tm5qK6uhp5eXmwsODv3OaSc/0ZPPnkk5g4caI4WcbHx2eQojaPr68vAOCdd95BYmIirKysEBMTg6SkJGzYsAHNzc0ICQnB6dOnkZubi9jYWISGhkpeb+p6HDIGY8oe62GYFnz+/HmKj48nlUpFjo6O9Oqrr9LNmzfFdgAoJSXFZB/19fWUkpJCo0ePJisrKxo5ciQ9/fTTtGHDBkm72tpamjlzJg0bNoxcXFxo0aJFtH///vu65MCd066TkpJIqVQatQ8JCSFPT0/x546ODsrIyCBPT0+ysbEhR0dH8vX1pfT0dLp+/brYbvfu3eTl5UW2trbk7u5OGRkZlJWVJZneeubMGXr++edpzJgxZGNjQxqNhmbMmEHFxcX3tI/s56OgoIB8fX3J2tqaxo4dS59++qnJKfp5eXkUFBRESqWSlEolPf7445SSkkLl5eVimwe95EBTUxO9+OKL5OLiQvb29hQREUFlZWWk1WrFZTwMSkpKyN/fn6ytrWnMmDG0Zs0ak0sOXLlyhaKjo0mlUhEAyXTvCxcuUHx8PKnVarK1taXJkyfT3r17H8i+/lzJvf6IiN5//30CQO+9955RnWHJgQ8++MCoDgClpaX9FOHL8sc//pEeeeQRsrCwEK+3zs5OSk9PJw8PD7KysqLRo0fTW2+9Re3t7ZLX9nc9DgUC0RAcLfaQWLZsGdLT09HQ0GA0w4wxxtjDbe3atXjttddQU1NjNEuODQ6+v8oYY4wNMUSEzz77DCEhIZwwDSE8pokxxhgbItra2rB7927k5+fj3//+N3bt2jXYIbFeOGlijDHGhoiGhgbo9Xqo1Wq8/fbbmDlz5mCHxHrhMU2MMcYYYzLwmCbGGGOMMRk4aWKMMcYYk4GTJsYYY4wxGThpYowxxhiTgZMmxhhjjDEZOGlibAjLycmBIAjIyckZ7FBkWbZsGQRBwJEjRwY7lCGtpqYGgiBg7ty5g7L9adOmDfm/Jv+gDfY5Yf8fOGlibBD9v31QHzlyBIIgYNmyZYMdypDHiYn53N3d4e7uPthhMNYnXtySsSFs9uzZCAgIwKhRowY7FFleffVVJCYm8p99YIz9LHHSxNgQ5uDgAAcHh8EOQzYXFxf+49OMsZ8tfjzHmBkKCwsRExMDFxcX2NjYYNy4cXj33Xdx48YNo7Z5eXkICQmBRqOBra0t3NzcEB4ejry8PAA945U8PDwAALm5uRAEQfxnGBPU15gmQRAwbdo01NXVQa/Xw8XFBSqVCtHR0aiqqgIAlJaWIjY2Fk5OTlCpVIiPj0d9fb1RnFlZWZg1axbc3d1ha2sLJycnREREID8/X9Ju2bJlCA0NBQCkp6dL4q2pqRHb9DWmac+ePQgNDYWDgwPs7Ozg7e2NNWvW4Pbt25J2vR9ZVlZWYvbs2XB0dIRSqUR4eDi++eab/k9SL3PnzoUgCKiqqsLq1asxfvx42NnZYcKECdi6dSsAoKOjA++88464/15eXti3b5/J/lpbW5GWlgZPT0/Y2dlBrVYjIiICx44dk7QTBAEFBQXi/w3/TD2GNWcfz507h4SEBGg0GtjY2MDDwwOLFy/GtWvXTLY/duwYQkJCoFQq4ezsjOeeew7fffed3MMnkZ2dDX9/f9jb28Pe3h7+/v4mx9r1foR74sQJTJ8+HWq1ut9HlYZzXltbi9raWskxu/NRsNw4Btqesf7wnSbGZFq/fj1SUlKgVqsRExMDjUaD4uJirFixAvn5+cjPz4e1tbXYdsGCBRg1ahRmz54NZ2dnXLlyBadPn8ZXX32FuLg4TJo0CYsWLcLatWvh7e2N2NhYcVtyxnU0NTUhKCgII0eORFJSEv7zn/9
g7969KCsrw65duzB16lT4+voiOTkZJSUlyMvLQ2NjIw4fPizpJyUlBd7e3ggPD4erqyvq6uqwc+dOhIeH48svv8SsWbMA9IzRqampQW5uLkJCQjBt2jSxD7Va3W+sa9aswZIlS+Dk5AS9Xg+lUondu3djyZIlOHr0KL788kujX6o1NTUICAiAp6cnkpOTceHCBezatQuhoaEoLS3FiBEj7nqMDH73u9/h1KlTiImJgUKhwNatW6HX6+Ho6IjMzEycP38e0dHRaG9vx+eff45Zs2ahtLQUOp1O7KOxsRHBwcH49ttvERgYiPnz56OlpUWM6a9//at4DtPS0pCTk4Pa2lqkpaWJfUyaNGnA+3js2DFERESgo6MD8fHxcHd3R1FREdauXYu9e/fi5MmTkrt8hw4dQmRkJCwsLPDcc8/Bzc0Nhw4dQmBgIBwdHWUfOwBYuHAhMjMz8cgjj+Cll14C0POl4MUXX8TXX3+NtWvXGr3mxIkTeO+99xAaGoqXX34ZFy9e7LN/tVqNtLQ0fPzxxwCAxYsXi3W9rzNz4xhI3Iz1ixhjd/Xtt9+SpaUleXt70/fffy+pW7lyJQGg1atXi2U+Pj5kbW1N9fX1Rn31fn11dTUBoKSkJJPbzc7OJgCUnZ0tKQdAAOi1116TlL/yyisEgNRqNX388cdieXd3N0VFRREAKikpkbymqqrKaLuXLl0iNzc3GjdunKQ8Pz+fAFBaWprJeNPS0ggA5efni2WVlZVkaWlJGo2GLl68KJa3t7dTUFAQAaBNmzaJ5YZjAoBWrVol6f/dd98lALRy5UqT279TUlISAaDx48fT1atXxfJTp06JxykoKIh++OEHsW7btm0EgFJTUyV96fV6AkAbN26UlNfX19Po0aPJ1dWVbt68KZaHhIRQXx+x5u5jV1cX6XQ6AkD79++XtH/jjTcIACUnJ0vajx07lgRBoKNHj4rl3d3d4n7I/fgvKCggAPTLX/6SmpubxfLGxkYaP348AaDCwkKx3HCNAKCsrCxZ2zDQarWk1WrvSxzmtr/be5ExIiJOmhiTYeHChUYfsgZdXV3k6upKvr6+YpmPjw8plUpqbGzst997SZrs7e2pra1NUl5YWEgASKfTUXd3t6Ru06ZNZv0iS01NJQBUU1Mjlg0kafrDH/5AACgjI8Oo/fHjxwkAhYWFiWWGY+Lh4UFdXV2S9oa6Z555RtY+GJKm3Nxco7qxY8cSACooKJCU3759m6ysrCg4OFgsa2hoIIVCIYmztz/96U8EgPbs2SOWyUma5O6j4bxGRkYa9dXa2kpOTk5ka2tLt27dIqIfE4aYmBij9jU1NaRQKGQnTcnJyQSAtm3bZlT3l7/8xShhM1wjPj4+svrvrb+kydw4zG3PSROTgx/PMSbDyZMnAQAHDhzAoUOHjOqtrKxQVlYm/pyYmIilS5di4sSJ0Ov1CA0NRVBQEIYPH37fYho3bhyGDRsmKTPMsvPy8jJ63GWou3TpkqS8qqoKK1euxOHDh1FXV4dbt25J6i9dugStVjvgOL/++msA0scsBlOmTIGtrS3+9a9/GdVNmjQJFhbSYZePPvooAKC5udmsGO58LAb0HI+qqiqjOoVCAY1GIzlO//znP9HV1YVbt26ZXG6hoqICAFBWVoYZM2aYFZecfezvGNrb2+Opp57C3//+d5SXl+OJJ54Qx0RNnTrVqL1Wq8Xo0aPFcWh309+2DWPcTJ0/Pz8/Wf3LZW4cA42bsf5w0sSYDI2NjQCAFStWyGr/+uuvw9nZGevXr8eHH36I1atXw9LSEtHR0fjoo4/EAeD3wlQCZmlpede6zs5OsayyshKTJ09GS0sLQkNDERMTg+HDh8PCwgJHjhxBQUGBURJlrpaWFgAwOQZJEASMGDECdXV1RnX97UNXV5dZMQzkWPU+Tobzf/z4cRw/frzP7bS1td23uHrvY3/HEPgxITa0u379OgBAo9GYbD9ixAjZSVNLSwssLCzg6upqsh9BEMTt3ll3P5kbx0DjZqw/nDQxJoPhl1tLSwtUKtVd2wuCgOTkZCQnJ+PatWs4evQovvjiC2zfvh0VFRU4e/YsFArFTx32XX300UdoamrC5s2b8cILL0jq5s+fL84AuxeGY1dfX290x4qIUF9ff1/vwP0UDPEtWbIEq1evHrTtm5r9CABXrlyRtDMsU3H16lWT7fvqp69td3d3o6GhwSgJu3r1KojI5Pm73wt7mhvHQONmrD+85ABjMvj7+wP48TGdOZydnREbG4tt27YhLCwM58+fR2VlJQCIiZO5d07ulwsXLgCAOEPOgIhM3lEZSLxPPvkkAJhchuDUqVNob283+fhsKPHz84MgCCgqKpL9mvt5bvs7hm1tbSguLoadnR0ee+wxAIC3tzcA4OjRo0bta2trzVp2oL9tG8ru1/lTKBR9Hi9z43iQcbOHBydNjMmwYMECWFpaIjU11eTU6ebmZnEMBdDzoUxEkjadnZ3iYx5bW1sAgKOjIwRBGPDaOffKcOfnznWGVq1ahXPnzhm1d3JyAgCz4tXr9bC0tMSaNWsk44Q6Ojrw+9//HgCG/J+RGTlyJBISEnDixAl88MEHRucW6EkAe6/XNZBj1ZfAwEDodDrs27cPBw8elNQtX74c165dw/PPPy8ueREUFAQPDw/s3btXcm6JCG+//bZZiVxSUhKAnrW5ej/Oun79OtLT0yVt7pWTkxO+//57tLe333McDzJu9vDgx3OMyTBx4kSsW7cOr7zyCh577DFERUVBp9OhtbUVVVVVKCgowNy5c/Hpp58CAGJjYzF8+HAEBARAq9Wis7MT//jHP3D+/HnEx8eLyYq9vT38/PxQWFiIOXPmYNy4cbCwsMCcOXPuafC1XPPnz0d2djbi4uKQkJAAZ2dnnDx5EmfOnEF0dDT+9re/Sdo//vjjcHNzw9atW2FjY4NHH30UgiAgNTW1z5XLdTodMjIysGTJEnh5eSEhIQFKpRJ79uxBeXk5Zs2aZfRocChat24dysvLsXTpUmzevBlTpkyBWq3Gd999h+LiYlRUVODy5cvi4PywsDDs2LEDcXFxiIyMhK2tLby9vRETE2P2ti0sLJCTk4OIiAhERUXh2WefhVarRVFREY4cOQKdTodVq1ZJ2m/YsAFRUVEIDw8X12k6fPgwLl++DC8vL5w9e1bWtoODg5GamorMzExMnDgRcXFxICLk5eXhv//9LxYuXIjg4GCz98mUsLAwFBcXIzIyElOnToW1tTWCg4PFf+bE8SDjZg+RQZq1x9j/pdOnT1NiYiK5ubmRlZUVubi4kI+PD7355ptUWloqtlu3bh3NnDmTtFot2drakrOzM02ePJnWr19PHR0dkj7Ly8spKiqK1Go1CYIgmbLf35IDISEhRvH1N226r+UC8vPzKTAwkFQqFanVaoqKiqKSkhKTywcQEZ08eZJCQkJIpVKJ6/FUV1cTkeklBwx27dolvs7GxoaeeOIJ+vDDD6mzs1P2PvS376YYlhwwxNdbf0sC9DX1/caNG/T++++Tr68vKZVKsrOzIw8PD4qNja
VNmzZJ9qWzs5OWLl1KY8aMIUtLS8k+DXQfz549S/Hx8eTi4kJWVlak1Wpp0aJF1NDQYLKfwsJCCg4OJjs7O3JycqJnn32Wamtr+933vmRlZZGfnx8NGzaMhg0bRn5+fiaXr7jbshT9aW1tpXnz5tGoUaPEZRHu7EduHOa25yUHmBwCkYn7zIwxxhhjTILHNDHGGGOMycBJE2OMMcaYDJw0McYYY4zJwEkTY4wxxpgMnDQxxhhjjMnASRNjjDHGmAycNDHGGGOMycBJE2OMMcaYDJw0McYYY4zJwEkTY4wxxpgMnDQxxhhjjMnASRNjjDHGmAz/Ay7LJLzUEjiMAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "exp='image_net'\n", + "file_name = 'res-sophIA-resnet.json'\n", + "name = 'image_net'\n", + "plot_all_error_bar(folder, file_name, name, exp, meas_calc_list, to_save=do_save)\n", + "# plot_evolution(folder, file_name, name, exp, meas_calc_list, to_save=do_save)\n", + "\n", + "new_file_name = 'res-sophIA-resnet_dyn-EPM.json'\n", + "total_to_dynamic_EPM(exp,folder, file_name, new_file_name, meas_calc_list, idle_power)\n", + "plot_all_error_bar(folder, new_file_name, name+'_dyn-EPM', exp, meas_calc_list_2, to_save=do_save)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/creating_plots/survey/README.txt b/creating_plots/survey/README.txt new file mode 100644 index 0000000..0e22a41 --- /dev/null +++ b/creating_plots/survey/README.txt @@ -0,0 +1 @@ +Place here the results (json file) of your experiments. \ No newline at end of file diff --git a/dictionary_TAPO_VAR_initialisation.py b/dictionary_TAPO_VAR_initialisation.py new file mode 100644 index 0000000..b886c98 --- /dev/null +++ b/dictionary_TAPO_VAR_initialisation.py @@ -0,0 +1,17 @@ +import json + +def dictionary_TAPO_VAR_initialisation(): + d = { + "TAPORUN" : False, + "TAPOSAVED" : False + } + + with open('TAPO-VAR.json', 'w') as f: + json.dump(d, f, indent = 4, sort_keys=True) + + # with open('TAPO-VAR.json', 'r') as f: + # d = json.load(f) + # print(d["TAPORUN"]) + +if __name__ == "__main__": + dictionary_TAPO_VAR_initialisation() \ No newline at end of file diff --git a/dictionary_UTIL_VAR_initialisation.py b/dictionary_UTIL_VAR_initialisation.py new file mode 100644 index 0000000..9002490 --- /dev/null +++ b/dictionary_UTIL_VAR_initialisation.py @@ -0,0 +1,17 @@ +import json + +def dictionary_UTIL_VAR_initialisation(): + d = { + "UTIL_RUN" : False, + "UTIL_SAVED" : False + } + + with open('UTIL-VAR.json', 'w') as f: + json.dump(d, f, indent = 4, sort_keys=True) + + # with open('TAPO-VAR.json', 'r') as f: + # d = json.load(f) + # print(d["TAPORUN"]) + +if __name__ == "__main__": + dictionary_UTIL_VAR_initialisation() \ No newline at end of file diff --git a/dictionary_list_initialisation.py b/dictionary_list_initialisation.py new file mode 100644 index 0000000..f7e0417 --- /dev/null +++ b/dictionary_list_initialisation.py @@ -0,0 +1,73 @@ +import copy +import json +import os + +def dictionary_list_initialisation(path): + + file1 = os.path.join(path, 'res_calc-time.json') + file2 = os.path.join(path, 'res_meas-time.json') + + for_calculator = { + "epochs" : [], + "time" : [], + "energy_consumed" : [], + "co2_emissions" : []} + + for_device = { + "NOCALC": copy.deepcopy(for_calculator), + "CT:meas": copy.deepcopy(for_calculator), + "CT:pred": copy.deepcopy(for_calculator), + "GA:def": copy.deepcopy(for_calculator), + "GA:auto-para": copy.deepcopy(for_calculator), + "ECO2AI": copy.deepcopy(for_calculator), + "CC:on": copy.deepcopy(for_calculator), + "TAPO": copy.deepcopy(for_calculator), + "FLOPS": copy.deepcopy(for_calculator)} + + for_experiment = { + "linux_alienware": { + "cuda": copy.deepcopy(for_device), + "cpu": copy.deepcopy(for_device) + } + } + 
+ measurements = { + "mnist": { + "training": copy.deepcopy(for_experiment), + "inference": copy.deepcopy(for_experiment) + }, + "cifar10": { + "training": copy.deepcopy(for_experiment), + "inference": copy.deepcopy(for_experiment) + }, + "CUB_200_2011": { + "training": copy.deepcopy(for_experiment), + "inference": copy.deepcopy(for_experiment) + }, + "image_net": { + "training": copy.deepcopy(for_experiment), + "inference": copy.deepcopy(for_experiment) + }, + "SQUAD-extracted": { + "training": copy.deepcopy(for_experiment), + "inference": copy.deepcopy(for_experiment) + }, + "SQUAD-v1-1": { + "training": copy.deepcopy(for_experiment), + "inference": copy.deepcopy(for_experiment) + }, + "idle": { + "training": copy.deepcopy(for_experiment), + "inference": copy.deepcopy(for_experiment) + } + } + + with open(file1, 'w') as f: + json.dump(measurements, f, indent = 4, sort_keys=True) + + with open(file2, 'w') as f: + json.dump(measurements, f, indent = 4, sort_keys=True) + + +if __name__ == "__main__": + dictionary_list_initialisation('.') \ No newline at end of file diff --git a/display_measurements.py b/display_measurements.py new file mode 100644 index 0000000..cc3f0bc --- /dev/null +++ b/display_measurements.py @@ -0,0 +1,25 @@ +import copy +import pprint +import json + +with open('measurements_data.json', 'r') as f: + measurements = json.load(f) + pprint.pprint(measurements) + +# device = 'cuda' +# computer = 'linux_alienware' + +# with open('measurements_data.json', 'w') as f: +# my_info = measurements["mnist"]["training"][computer][device]["CT"] +# print(my_info) + +# my_info["time"] = 999 + +# print(my_info) + +# json.dump(measurements, f, indent = 4, sort_keys=True) + +# with open('measurements_data.json', 'r') as f: +# mmm = json.load(f) +# print(mmm["mnist"]["training"][computer][device]["CT"]) +# pprint.pprint(mmm) \ No newline at end of file diff --git a/exp-0-idle/idle.py b/exp-0-idle/idle.py new file mode 100644 index 0000000..1f99430 --- /dev/null +++ b/exp-0-idle/idle.py @@ -0,0 +1,148 @@ +import torch +# import torch.nn.functional as F +import torch.optim as optim +from torch.optim.lr_scheduler import StepLR +from tqdm import tqdm # for progress bar +from argparse import ArgumentParser +import os +import time + +# --------------------- # + +# --- FOR CALCULATORS +import sys +_path = '.' 
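+# The calculator helpers imported below (fct_for_saving, fct_for_tapo, fct_for_ga,
+# fct_for_experiments) are resolved relative to this path.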
+sys.path.append(os.path.join(_path)) +from fct_for_saving import save_cc +from fct_for_saving import save_ct +from fct_for_saving import save_eco2ai +from fct_for_saving import save_ga +from fct_for_saving import save_nocalc +from fct_for_saving import save_tapo +from fct_for_tapo import stop_TAPO +import psutil +import GPUtil +from fct_for_ga import stop_UTIL, mean_parallel_UTIL +from fct_for_experiments import ExpParams +from fct_for_experiments import prepare_calculator +from fct_for_experiments import start_calculators +from fct_for_experiments import stop_calculators +# --------------------- + +def main(): + + print("# ------------------------------ #") + print("# #") + print("# ------------- #") + print("# -- START -- #") + print("# ------------- #") + print("# #") + print("# ------------------------------ #") + + parser = ArgumentParser() + + parser.add_argument('--batch-size', type=int, default=64, metavar='N', help='input batch size for training (default: 64)') + parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N', help='input batch size for testing (default: 1000)') + parser.add_argument('--epochs', type=int, default=1, metavar='N', help='number of epochs to train (default: 14)') + # parser.add_argument('--save-model', action='store_true', default=False, help='For Saving the current Model') + + parser.add_argument("--data_folder", type=str, help = "TODO", default = './data') + parser.add_argument("--use_accelerator", type=str, help = "TODO", default = True) + parser.add_argument("--save_model", type=str, help = "TODO", default = False) + parser.add_argument("--ml_phase", type=str, help = "TODO", default = "inference") + parser.add_argument("--calculator", type=str, help = "TODO", default = "no_calculator") + parser.add_argument("--calculator_mode", type=str, help = "TODO", default = "") + parser.add_argument("--dev_test", type=str, help = "TODO", default = "False") + parser.add_argument("--nb_batch_inferences", type=int, help = "TODO", default = 100) + parser.add_argument("--name_exp", type=str, help = "TODO", default = 'mnist') + parser.add_argument("--computer", type=str, help = "TODO", default = 'linux_alienware') + parser.add_argument("--path_logs_and_results", type=str, help = "TODO", default = '.') + parser.add_argument("--output_dir", default=".", type=str, help="path to save outputs") + parser.add_argument("--idle_time", default=60, type=int, help="duration of idle state tracking") + + # Calculators and modes: + # code_carbon + # -> online, offline + # carbon_tracker + # -> measure, predict + # eco2ai + # energy_scopium + # flops + # green_algorithms + # -> declarative, automated, automated_parallel + # no_calculator + # tapo + + args_parser = parser.parse_args() + + # --- FOR CALCULATORS + exp = ExpParams(args_parser) + # ------------------- + + ####################### + ##### Preparation ##### + ####################### + + # --- FOR CALCULATORS + tracker = prepare_calculator(exp) + if exp.name_calc == 'green_algorithms': + cpu_util = [] + gpu_util = [] + ram_util = [] + start_calculators(exp, tracker) + t0 = time.time() + # ------------------- + + ##################### + ##### idle ##### + ##################### + + print("# ---------------------- #") + print("# --- idle start --- #") + print("# ---------------------- #") + + # --- FOR CALCULATORS + if exp.name_calc == "carbon_tracker": + tracker.epoch_start() + # ------------------- + + time.sleep(args_parser.idle_time) + + # --- FOR CALCULATORS + if exp.name_calc == "carbon_tracker": + 
tracker.epoch_end() + # ------------------- + + print("# --------------------- #") + print("# --- idle stop --- #") + print("# --------------------- #") + + + # --- FOR CALCULATORS + tfinal = time.time() + duration = tfinal - t0 + stop_calculators(exp, tracker) + + # Saving the data: + if exp.name_calc == 'code_carbon': + save_cc(exp, args_parser, duration) + elif exp.name_calc == 'carbon_tracker': + save_ct(exp, args_parser, duration) + elif exp.name_calc == 'eco2ai': + save_eco2ai(exp, args_parser, duration) + elif exp.name_calc == 'green_algorithms': + if exp.automated and exp.parallel: + stop_UTIL(exp, t0, tfinal) + cpu_util, gpu_util, ram_util = mean_parallel_UTIL(exp) + save_ga(exp, args_parser, duration, + exp.automated, cpu_util, gpu_util, ram_util) + elif exp.name_calc == 'tapo': + stop_TAPO(exp, t0, tfinal) + save_tapo(exp, args_parser) + else: # no calculator + save_nocalc(exp, args_parser, duration) + # ---------------------- + + +if __name__ == '__main__': + main() diff --git a/exp-1-MNIST/MNIST-ALL.py b/exp-1-MNIST/MNIST-ALL.py new file mode 100644 index 0000000..01fa981 --- /dev/null +++ b/exp-1-MNIST/MNIST-ALL.py @@ -0,0 +1,205 @@ +import torch +# import torch.nn.functional as F +import torch.optim as optim +from torch.optim.lr_scheduler import StepLR +from tqdm import tqdm # for progress bar +from argparse import ArgumentParser +import os +import time + +# - Specific to MNIST - # +from fct_for_mnist import Net, train_0, create_dataloaders +# --------------------- # + +# --- FOR CALCULATORS +import sys +_path = '.' +sys.path.append(os.path.join(_path)) +from tqdm import tqdm +from fct_for_saving import save_cc +from fct_for_saving import save_ct +from fct_for_saving import save_eco2ai +from fct_for_saving import save_FLOPS +from fct_for_saving import save_ga +from fct_for_saving import save_nocalc +from fct_for_saving import save_tapo +from fct_for_tapo import stop_TAPO +from fct_for_ga import stop_UTIL, mean_parallel_UTIL +from fct_for_experiments import ExpParams +from fct_for_experiments import prepare_calculator +from fct_for_experiments import start_calculators +from fct_for_experiments import stop_calculators +from fct_for_experiments import flops_method_pytorch +# --------------------- + +def main(): + + print("# ------------------------------ #") + print("# #") + print("# ------------- #") + print("# -- START -- #") + print("# ------------- #") + print("# #") + print("# ------------------------------ #") + + parser = ArgumentParser() + parser.add_argument('--batch-size', type=int, default=64, metavar='N', + help='input batch size for training (default: 64)') + parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N', + help='input batch size for testing (default: 1000)') + parser.add_argument('--epochs', type=int, default=14, metavar='N', + help='number of epochs to train (default: 14)') + parser.add_argument('--lr', type=float, default=1.0, metavar='LR', + help='learning rate (default: 1.0)') + parser.add_argument('--gamma', type=float, default=0.7, metavar='M', + help='Learning rate step gamma (default: 0.7)') + parser.add_argument('--no-cuda', action='store_true', default=False, + help='disables CUDA training') + parser.add_argument('--no-mps', action='store_true', default=False, + help='disables macOS GPU training') + parser.add_argument('--dry-run', action='store_true', default=False, + help='quickly check a single pass') + parser.add_argument('--seed', type=int, default=1, metavar='S', + help='random seed (default: 1)') + 
parser.add_argument('--log-interval', type=int, default=10, metavar='N', + help='how many batches to wait before logging training status') + # parser.add_argument('--save-model', action='store_true', default=False, + # help='For Saving the current Model') + + parser.add_argument("--data_folder", type=str, help = "TODO", default = './data') + parser.add_argument("--use_accelerator", type=str, help = "TODO", default = True) + parser.add_argument("--save_model", type=str, help = "TODO", default = False) + parser.add_argument("--ml_phase", type=str, help = "TODO", default = "inference") + parser.add_argument("--calculator", type=str, help = "TODO", default = "no_calculator") + parser.add_argument("--calculator_mode", type=str, help = "TODO", default = "") + parser.add_argument("--dev_test", type=str, help = "TODO", default = "False") + parser.add_argument("--nb_batch_inferences", type=int, help = "TODO", default = 100) + parser.add_argument("--name_exp", type=str, help = "TODO", default = 'mnist') + parser.add_argument("--computer", type=str, help = "TODO", default = 'linux_alienware') + parser.add_argument("--path_logs_and_results", type=str, help = "TODO", default = '.') + parser.add_argument("--output_dir", default=".", type=str, help="path to save outputs") + + + args_parser = parser.parse_args() + + # --- FOR CALCULATORS + exp = ExpParams(args_parser) + # ------------------- + + ####################### + ##### Preparation ##### + ####################### + + train = train_0 # don't display training stats + print('---------------') + print(exp.device_name == 'cuda') + print('--------------') + train_loader, test_loader = create_dataloaders(exp.device_name == 'cuda', args_parser) + model = Net().to(exp.device) + optimizer = optim.Adadelta(model.parameters(), lr=args_parser.lr) + scheduler = StepLR(optimizer, step_size=1, gamma=args_parser.gamma) + + + # --- FOR CALCULATORS + tracker = prepare_calculator(exp) + if exp.name_calc == 'green_algorithms': + cpu_util = [] + gpu_util = [] + ram_util = [] + start_calculators(exp, tracker) + t0 = time.time() + # ------------------- + + + ##################### + ##### Training ##### + ##################### + + if (exp.ml == "training") and exp.name_calc != 'flops': + + print("# ---------------------- #") + print("# --- training start --- #") + print("# ---------------------- #") + + + for epoch in tqdm(range(1, exp.epochs + 1)): + + # --- FOR CALCULATORS + if exp.name_calc == "carbon_tracker": + tracker.epoch_start() + # ------------------- + + train(args_parser, model, exp.device, train_loader, optimizer, epoch) + scheduler.step() + + # --- FOR CALCULATORS + if exp.name_calc == "carbon_tracker": + tracker.epoch_end() + # ------------------- + + model_path = os.path.join(args_parser.output_dir, "mnist_cnn.pt") + torch.save(model.state_dict(), model_path) + + print("# --------------------- #") + print("# --- training stop --- #") + print("# --------------------- #") + + + ##################### + ##### Inference ##### + ##################### + + if (exp.ml == "inference") and not exp.name_calc == 'flops': + + print("# ----------------------- #") + print("# --- inference start --- #") + print("# ----------------------- #") + + # recover the saved model: + PATH = os.path.join(_path, "models", "mnist_cnn.pt") + model.load_state_dict(torch.load(PATH)) + + for kk in tqdm(range(args_parser.nb_batch_inferences)): + inputs, targets = next(iter(test_loader)) + inputs = inputs.to(exp.device) + output = model(inputs) + pred = output.argmax(dim=1, keepdim=True) + # 
get the index of the max log-probability + + print("# -------------------------- #") + print("# --- tag inference stop --- #") + print("# -------------------------- #") + + + # --- FOR CALCULATORS + tfinal = time.time() + duration = tfinal - t0 + stop_calculators(exp, tracker) + copy_model = model.to(torch.device("cpu")) + Ec_kWh = flops_method_pytorch(exp, train_loader, copy_model) + + # Saving the data: + if exp.name_calc == 'code_carbon': + save_cc(exp, args_parser, duration) + elif exp.name_calc == 'carbon_tracker': + save_ct(exp, args_parser, duration) + elif exp.name_calc == 'eco2ai': + save_eco2ai(exp, args_parser, duration) + elif exp.name_calc == 'green_algorithms': + if exp.automated and exp.parallel: + stop_UTIL(exp, t0, tfinal) + cpu_util, gpu_util, ram_util = mean_parallel_UTIL(exp) + save_ga(exp, args_parser, duration, + exp.automated, cpu_util, gpu_util, ram_util) + elif exp.name_calc == 'tapo': + stop_TAPO(exp, t0, tfinal) + save_tapo(exp, args_parser) + elif exp.name_calc =='flops': + save_FLOPS(exp, args_parser, Ec_kWh) + else: # no calculator + save_nocalc(exp, args_parser, duration) + # ---------------------- + + +if __name__ == '__main__': + main() diff --git a/exp-1-MNIST/fct_for_mnist.py b/exp-1-MNIST/fct_for_mnist.py new file mode 100644 index 0000000..11d5e74 --- /dev/null +++ b/exp-1-MNIST/fct_for_mnist.py @@ -0,0 +1,144 @@ +from __future__ import print_function +import torch +import torch.nn as nn +import torch.nn.functional as F +# import torch.optim as optim +from torchvision import datasets, transforms +# from torch.optim.lr_scheduler import StepLR +# from tqdm import tqdm # for progress bar + +# import sys +# sys.path.append('../') +# from save_data import save_data +# import time + +# Decide if we deplay info during training: +# display_train = False + +##################### +##### Section 1 ##### +##################### + +# Definition of the ML model, training epoch, and testing function + +class Net(nn.Module): + def __init__(self): + super(Net, self).__init__() + self.conv1 = nn.Conv2d(1, 32, 3, 1) + self.conv2 = nn.Conv2d(32, 64, 3, 1) + self.dropout1 = nn.Dropout(0.25) + self.dropout2 = nn.Dropout(0.5) + self.fc1 = nn.Linear(9216, 128) + self.fc2 = nn.Linear(128, 10) + + def forward(self, x): + x = self.conv1(x) + x = F.relu(x) + x = self.conv2(x) + x = F.relu(x) + x = F.max_pool2d(x, 2) + x = self.dropout1(x) + x = torch.flatten(x, 1) + x = self.fc1(x) + x = F.relu(x) + x = self.dropout2(x) + x = self.fc2(x) + output = F.log_softmax(x, dim=1) + return output + + +def train_0(args, model, device, train_loader, optimizer, epoch): + """ Training without display of loss """ + model.train() + for batch_idx, (data, target) in enumerate(train_loader): + data, target = data.to(device), target.to(device) + optimizer.zero_grad() + output = model(data) + loss = F.nll_loss(output, target) + loss.backward() + optimizer.step() + +def train_1(args, model, device, train_loader, optimizer, epoch): + """ Training with display of loss """ + model.train() + for batch_idx, (data, target) in enumerate(train_loader): + data, target = data.to(device), target.to(device) + optimizer.zero_grad() + output = model(data) + loss = F.nll_loss(output, target) + loss.backward() + optimizer.step() + if batch_idx % args.log_interval == 0: + print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format( + epoch, batch_idx * len(data), len(train_loader.dataset), + 100. 
* batch_idx / len(train_loader), loss.item())) + if args.dry_run: + break + +def test(model, device, test_loader): + model.eval() + test_loss = 0 + correct = 0 + with torch.no_grad(): + for data, target in test_loader: + data, target = data.to(device), target.to(device) + output = model(data) + test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss + pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability + correct += pred.eq(target.view_as(pred)).sum().item() + + test_loss /= len(test_loader.dataset) + + print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format( + test_loss, correct, len(test_loader.dataset), + 100. * correct / len(test_loader.dataset))) + +####################### +##### Section 2.1 ##### +####################### + +# Creating the Params class that will store all the parameters of the experiment + +# class Params(): +# def __init__(self, batch_size = 64, test_batch_size = 1000, epochs = 14, lr = 1.0, +# gamma = 0.7, no_cuda = False, no_mps = False, dry_run = False, seed = 1, +# log_interval = 10, save_model = False, data_folder = './data', +# nb_batch_inferences = 100, dev_test = False): + # self.batch_size = batch_size # input batch size for training (default: 64) + # self.test_batch_size = test_batch_size # input batch size for testing (default: 1000) + # self.epochs = epochs # number of epochs to train (default: 14) + # self.lr = lr # learning rate (default: 1.0) + # self.gamma = gamma # learning rate step gamma (default: 0.7) + # self.no_cuda = no_cuda # disables CUDA training + # self.no_mps = no_mps # disables macOS GPU training + # self.dry_run = dry_run # quickly check a single pass + # self.seed = seed # random seed (default: 1) + # self.log_interval = log_interval # how many batches to wait before logging training status (default: 10) + # self.save_model = save_model # For Saving the current Model + # self.data_folder = data_folder + # self.nb_batch_inferences = nb_batch_inferences + + # if dev_test: + # # For tests: + # self.epochs = 2 + # self.nb_batch_inferences = 10 + +def create_dataloaders(use_cuda, args): + train_kwargs = {'batch_size': args.batch_size} + test_kwargs = {'batch_size': args.test_batch_size} + if use_cuda: + cuda_kwargs = {'num_workers': 1,'pin_memory': True,'shuffle': True} + train_kwargs.update(cuda_kwargs) + test_kwargs.update(cuda_kwargs) + + transform=transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize((0.1307,), (0.3081,)) + ]) + dataset1 = datasets.MNIST(args.data_folder, train=True, download=True, + transform=transform) + dataset2 = datasets.MNIST(args.data_folder, train=False, + transform=transform) + train_loader = torch.utils.data.DataLoader(dataset1,**train_kwargs) + test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs) + return(train_loader, test_loader) \ No newline at end of file diff --git a/exp-2-CIFAR10/CIFAR10-ALL.py b/exp-2-CIFAR10/CIFAR10-ALL.py new file mode 100644 index 0000000..716d007 --- /dev/null +++ b/exp-2-CIFAR10/CIFAR10-ALL.py @@ -0,0 +1,195 @@ +import torch +# import torch.nn.functional as F +import torch.optim as optim +from torch.optim.lr_scheduler import StepLR +from argparse import ArgumentParser +import os +import time + +# - Specific to CIFAR10 - # +from fct_for_cifar10 import Net, train_0, create_dataloaders +from torch.nn import CrossEntropyLoss +# ----------------------- # + +# --- FOR CALCULATORS +import sys +_path = '.' 
+sys.path.append(os.path.join(_path)) +from tqdm import tqdm +from fct_for_saving import save_cc +from fct_for_saving import save_ct +from fct_for_saving import save_eco2ai +from fct_for_saving import save_FLOPS +from fct_for_saving import save_ga +from fct_for_saving import save_nocalc +from fct_for_saving import save_tapo +from fct_for_tapo import stop_TAPO +import psutil +import GPUtil +from fct_for_ga import stop_UTIL, mean_parallel_UTIL +from fct_for_experiments import ExpParams +from fct_for_experiments import prepare_calculator +from fct_for_experiments import start_calculators +from fct_for_experiments import stop_calculators +from fct_for_experiments import flops_method_pytorch +# --------------------- + +def main(): + + print("# ------------------------------ #") + print("# #") + print("# ------------- #") + print("# -- START -- #") + print("# ------------- #") + print("# #") + print("# ------------------------------ #") + + parser = ArgumentParser() + + parser.add_argument("--batch_size", type=int, help = "TODO", default = 4) + parser.add_argument("--test_batch_size", type=int, help = "TODO", default = 4) + parser.add_argument("--lr", type=float, help = "TODO", default = 0.001) + parser.add_argument("--momentum", type=float, help = "TODO", default = 0.9) + parser.add_argument("--seed", type=float, help = "TODO", default = 1) + parser.add_argument("--data_folder", type=str, help = "TODO", default = './data') + + parser.add_argument("--use_accelerator", type=str, help = "TODO", default = True) + parser.add_argument("--save_model", type=str, help = "TODO", default = False) + parser.add_argument("--ml_phase", type=str, help = "TODO", default = "inference") + parser.add_argument("--calculator", type=str, help = "TODO", default = "no_calculator") + parser.add_argument("--calculator_mode", type=str, help = "TODO", default = "") + parser.add_argument("--dev_test", type=str, help = "TODO", default = "False") + parser.add_argument("--nb_batch_inferences", type=int, help = "TODO", default = 1000) + parser.add_argument("--name_exp", type=str, help = "TODO", default = 'cifar10') + parser.add_argument("--computer", type=str, help = "TODO", default = 'linux_alienware') + parser.add_argument("--epochs", type=int, help = "TODO", default = 10) + parser.add_argument("--path_logs_and_results", type=str, help = "TODO", default = '.') + parser.add_argument("--output_dir", default=".", type=str, help="path to save outputs") + + # --------------------- + + args_parser = parser.parse_args() + + # --- FOR CALCULATORS + exp = ExpParams(args_parser) + # ------------------- + + + ##################### + #### Preparation #### + ##################### + + train = train_0 # don't display training stats + train_loader, test_loader = create_dataloaders(args_parser) + model = Net().to(exp.device) + criterion = CrossEntropyLoss() # Define a Loss function and optimizer below + optimizer = optim.SGD(model.parameters(), lr=args_parser.lr, momentum=args_parser.momentum) + # Classification Cross-Entropy loss and SGD with momentum + + + # --- FOR CALCULATORS + tracker = prepare_calculator(exp) + if exp.name_calc == 'green_algorithms': + cpu_util = [] + gpu_util = [] + ram_util = [] + start_calculators(exp, tracker) + t0 = time.time() + # ------------------- + + + ##################### + ##### Training ##### + ##################### + + if (exp.ml == "training") and exp.name_calc != 'flops': + + print("# ---------------------- #") + print("# --- training start --- #") + print("# ---------------------- #") + + for epoch in 
tqdm(range(1, exp.epochs + 1)): + + # --- FOR CALCULATORS + if exp.name_calc == "carbon_tracker": + tracker.epoch_start() + # ------------------- + + train(model, exp.device, train_loader, criterion, optimizer, epoch) + + # --- FOR CALCULATORS + if exp.name_calc == "carbon_tracker": + tracker.epoch_end() + # ------------------- + + # if exp.save_model: + model_path = os.path.join(args_parser.output_dir, "cifar_net.pth") + torch.save(model.state_dict(), model_path) + print("# --------------------- #") + print("# --- training stop --- #") + print("# --------------------- #") + + + ##################### + ##### Inference ##### + ##################### + + if (exp.ml == "inference") and not exp.name_calc == 'flops': + + print("# ----------------------- #") + print("# --- inference start --- #") + print("# ----------------------- #") + + # recover the saved model: + PATH = os.path.join(_path, "models", "cifar_net.pth") + model.load_state_dict(torch.load(PATH)) + + for kk in tqdm(range(args_parser.nb_batch_inferences)): + input, targets = next(iter(test_loader)) + input = input.to(exp.device) + output = model(input) + _, pred = torch.max(output, 1) + + # --- FOR CALCULATORS + if exp.name_calc == 'green_algorithms' and exp.automated and (not exp.parallel): + cpu_util.append(psutil.cpu_percent()) + gpu_util.append(GPUtil.getGPUs()[0].load) + ram_util.append(psutil.virtual_memory()[3]/1000000000) + # ------------------ + + print("# ---------------------- #") + print("# --- inference stop --- #") + print("# ---------------------- #") + + + # --- FOR CALCULATORS + tfinal = time.time() + duration = tfinal - t0 + stop_calculators(exp, tracker) + copy_model = model.to(torch.device("cpu")) + Ec_kWh = flops_method_pytorch(exp, train_loader, copy_model) + + # Saving the data: + if exp.name_calc == 'code_carbon': + save_cc(exp, args_parser, duration) + elif exp.name_calc == 'carbon_tracker': + save_ct(exp, args_parser, duration) + elif exp.name_calc == 'eco2ai': + save_eco2ai(exp, args_parser, duration) + elif exp.name_calc == 'green_algorithms': + if exp.automated and exp.parallel: + stop_UTIL(exp, t0, tfinal) + cpu_util, gpu_util, ram_util = mean_parallel_UTIL(exp) + save_ga(exp, args_parser, duration, + exp.automated, cpu_util, gpu_util, ram_util) + elif exp.name_calc == 'tapo': + stop_TAPO(exp, t0, tfinal) + save_tapo(exp, args_parser) + elif exp.name_calc =='flops': + save_FLOPS(exp, args_parser, Ec_kWh) + else: # no calculator + save_nocalc(exp, args_parser, duration) + # ---------------------- + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/exp-2-CIFAR10/fct_for_cifar10.py b/exp-2-CIFAR10/fct_for_cifar10.py new file mode 100644 index 0000000..0c03871 --- /dev/null +++ b/exp-2-CIFAR10/fct_for_cifar10.py @@ -0,0 +1,325 @@ +import torch +import torchvision +import torchvision.transforms as transforms +import matplotlib.pyplot as plt +import numpy as np +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim +# from tqdm import tqdm # for progress bar +# import os +# import sys +# sys.path.append('../') +# from save_data import save_data +# import pandas as pd + +##################### +##### Section 1 ##### +##################### + +# Definition of the ML model, and training epoch + +class Net(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = nn.Conv2d(3, 6, 5) + self.pool = nn.MaxPool2d(2, 2) + self.conv2 = nn.Conv2d(6, 16, 5) + self.fc1 = nn.Linear(16 * 5 * 5, 120) + self.fc2 = nn.Linear(120, 84) + self.fc3 = nn.Linear(84, 
10) + + def forward(self, x): + x = self.pool(F.relu(self.conv1(x))) + x = self.pool(F.relu(self.conv2(x))) + x = torch.flatten(x, 1) # flatten all dimensions except batch + x = F.relu(self.fc1(x)) + x = F.relu(self.fc2(x)) + x = self.fc3(x) + return x + +def train_0(net, device, trainloader, criterion, optimizer, epoch): + """ Training with display of loss """ + for i, data in enumerate(trainloader, 0): + # get the inputs; data is a list of [inputs, labels] + inputs, labels = data[0].to(device), data[1].to(device) + + # zero the parameter gradients + optimizer.zero_grad() + + # forward + backward + optimize + outputs = net(inputs) + loss = criterion(outputs, labels) + loss.backward() + optimizer.step() + + +def train_1(net, device, trainloader, criterion, optimizer, epoch): + """ Training with display of loss """ + running_loss = 0.0 + for i, data in enumerate(trainloader, 0): + # get the inputs; data is a list of [inputs, labels] + inputs, labels = data[0].to(device), data[1].to(device) + + # zero the parameter gradients + optimizer.zero_grad() + + # forward + backward + optimize + outputs = net(inputs) + loss = criterion(outputs, labels) + loss.backward() + optimizer.step() + + # print statistics + running_loss += loss.item() + if i % 2000 == 1999: # print every 2000 mini-batches + print(f'[{epoch + 1}, {i + 1:5d}] loss: {running_loss / 2000:.3f}') + running_loss = 0.0 + + +# function to show an image +def imshow(img): + img = img / 2 + 0.5 # unnormalize + npimg = img.numpy() + plt.imshow(np.transpose(npimg, (1, 2, 0))) + plt.show() + + +# class Params(): +# def __init__(self, batch_size = 4, lr = 0.001, momentum = 0.9, +# seed = 1, data_folder = './data'): +# self.batch_size = batch_size # input batch size for training (default: 4) +# self.test_batch_size = batch_size # input batch size for inference (default: 4) +# self.lr = lr # learning rate (default: 0.001) +# self.momentum = momentum +# self.seed = seed # random seed (default: 1) +# self.data_folder = data_folder + +def create_dataloaders(args): + + # --- Note from pytorch --- # + # If running on Windows and you get a BrokenPipeError, + # try setting the num_worker of torch.utils.data.DataLoader() to 0 + # --- --- # + + # load CIFRAR10 using torchvision + normalize it + # -> PILImage images of range [0, 1] + # + transform these images to Tensors of normailzed range [-1, 1] + + transform = transforms.Compose( + [transforms.ToTensor(), + transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) + + trainset = torchvision.datasets.CIFAR10(root=args.data_folder, train=True, + download=True, transform=transform) + trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch_size, + shuffle=True, num_workers=2) + + testset = torchvision.datasets.CIFAR10(root=args.data_folder, train=False, + download=True, transform=transform) + testloader = torch.utils.data.DataLoader(testset, batch_size=args.batch_size, + shuffle=False, num_workers=2) + return(trainloader, testloader) + + + + + + +def main(): + + no_accelerator = True + + # Setting the parameters and eventual accelerator + + # Settings + batch_size = 4 # input batch size for training (default: 64) + epochs = 10 # number of epochs to train (default: 14) + lr = 0.001 # learning rate (default: 1.0) + momentum = 0.9 # ... 
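+    # NOTE: this standalone main() refers to names that are neither defined nor
+    # imported in this module (e.g. trainloader, display_images, display_train,
+    # EmissionsTracker, tqdm, pd, save_data, os), so it will not run as-is;
+    # the experiment entry point used elsewhere in this repo is CIFAR10-ALL.py.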
+ + use_cuda = not no_accelerator and torch.cuda.is_available() + use_mps = not no_accelerator and torch.backends.mps.is_available() + + if use_cuda: + device = torch.device("cuda") + elif use_mps: + device = torch.device("mps") + else: + device = torch.device("cpu") + print("Device is: ", device) + + ##################### + ##### Section 3 ##### + ##################### + + classes = ('plane', 'car', 'bird', 'cat', + 'deer', 'dog', 'frog', 'horse', 'ship', 'truck') + + # get some random training images + dataiter = iter(trainloader) + images, labels = next(dataiter) + + if display_images == True: + # show images + imshow(torchvision.utils.make_grid(images)) + # print labels + print(' '.join(f'{classes[labels[j]]:5s}' for j in range(batch_size))) + + # Instance of the ML model + net = Net().to(device) + + # Define a Loss function and optimizer: + # Classification Cross-Entropy loss and SGD with momentum + criterion = nn.CrossEntropyLoss() + optimizer = optim.SGD(net.parameters(), lr=lr, momentum=momentum) + + ##################### + ##### Section 4 ##### + ##################### + + # Training + + # -- --- --- -- # + # - CC block - # + # - - # + + # We create the tracker's instance: + output_file = "output_cc.csv" + + tracker = EmissionsTracker(output_file = output_file) + + print("# ---------------------- #") + print("# --- training start --- #") + print("# ---------------------- #") + tracker.start() + # - - # + # - - # + # -- --- --- -- # + + if display_train == False: + train = train_0 + # Loop over the data iterator, feed the inputs to the network and optimize. + for epoch in tqdm(range(epochs)): # loop over the dataset multiple times + train(net, device, trainloader, criterion, optimizer, epoch) + else: + train = train_1 + # Loop over the data iterator, feed the inputs to the network and optimize. 
+ for epoch in range(epochs): # loop over the dataset multiple times + train(net, device, trainloader, criterion, optimizer, epoch) + + # -- --- --- -- # + # - CC block - # + # - - # + print("# --------------------- #") + print("# --- training stop --- #") + print("# --------------------- #") + tracker.stop() + # - - # + # - - # + # -- --- --- -- # + + # save the trained model + PATH = './cifar_net.pth' + torch.save(net.state_dict(), PATH) + # more on saving Pytorch models: https://pytorch.org/docs/stable/notes/serialization.html + + ##################### + ##### Section 5 ##### + ##################### + + # Inference + + # recover the saved model + PATH = "./cifar_net.pth" + net = Net().to(device) + net.load_state_dict(torch.load(PATH)) + + nb_batch_inferences = 100 + nb_inferences = nb_batch_inferences*batch_size + + # -- --- --- -- # + # - CC block - # + # - - # + print("# ----------------------- #") + print("# --- inference start --- #") + print("# ----------------------- #") + output_file = "output_cc.csv" + # measure_power_secs = 0.1 + + # save_to_logger + tracker = EmissionsTracker(output_file = output_file) + tracker.start() + # - - # + # - - # + # -- --- --- -- # + + for kk in tqdm(range(nb_batch_inferences)): + inputs, targets = next(iter(testloader)) + inputs = inputs.to(device) + outputs = net(inputs) + _, one_pred = torch.max(outputs, 1) + + + # -- --- --- -- # + # - CC block - # + # - - # + print("# -------------------------- #") + print("# --- tag inference stop --- #") + print("# -------------------------- #") + tracker.stop() + # - - # + # - - # + # -- --- --- -- # + + + + # -- --- --- -- # + # - CC block - # + # - - # + + # Saving the data in the json file + + experience = "cifar10" + device_type = device.type + calculator = "CC" + + # -------------------------------- # + # - CHANGE DEPENDING ON COMPUTER - # + # # + computer = "linux_alienware" + # # + # -------------------------------- # + + + file = pd.read_csv("output_cc.csv") + + df=pd.DataFrame(file) + + # For training: + ml_phase = "training" + meas_epochs = epochs + meas_time = df["duration"].iloc[-2] + meas_energy = df["energy_consumed"].iloc[-2] + meas_co2 = df["emissions"].iloc[-2] + save_data(experience, ml_phase, computer, device_type, calculator, meas_epochs, meas_time, meas_energy, meas_co2) + + # For inference: + ml_phase = "inference" + meas_epochs = "N/A" + meas_time = df["duration"].iloc[-1] + meas_energy = df["energy_consumed"].iloc[-1] + meas_co2 = df["emissions"].iloc[-1] + meas_time = meas_time/nb_inferences + meas_energy = meas_energy/nb_inferences + meas_co2 = meas_co2/nb_inferences + + save_data(experience, ml_phase, computer, device_type, calculator, meas_epochs, meas_time, meas_energy, meas_co2) + + os.remove("output_cc.csv") + # - - # + # - - # + # -- --- --- -- # + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/exp-3-resnet18/README-ResNet18.md b/exp-3-resnet18/README-ResNet18.md new file mode 100644 index 0000000..8c45f3c --- /dev/null +++ b/exp-3-resnet18/README-ResNet18.md @@ -0,0 +1,69 @@ +# Tutorial ResNet18 pre-training on ImageNet (and CUB_200_2011) + +**Programming language:** Developed and tested using python 3.10 + +## About the ResNet18 training code +We are using the official code from PyTorch for training ResNet18 on ImageNet. + +The script is called "Image classification reference training scripts". 
We use the script train.py, located at https://github.com/pytorch/vision/blob/main/references/classification/train.py
+
+Useful links:
+- ResNet page: https://pytorch.org/vision/stable/models/resnet.html
+- ResNet scientific paper: https://arxiv.org/pdf/1512.03385.pdf
+- ResNet18 page: https://pytorch.org/vision/stable/models/generated/torchvision.models.resnet18.html#torchvision.models.resnet18
+
+
+
+## Data preparation for the ImageNet dataset
+
+**Step 1**: Download the dataset from Kaggle
+- Download the dataset at https://www.kaggle.com/c/imagenet-object-localization-challenge/overview/description. Go to the Data page and download the data; on Kaggle it is contained in a folder named ILSVRC. You will obtain an archive named imagenet-object-localisation-challenge.zip. **Warning: the archive weighs 167.62 GB.**
+
+- Extract the downloaded archive. Keep only the folder ILSVRC/Data/train/ and rename it to images/. This folder contains one subfolder per image class, and each subfolder contains 1300 images.
+
+- Place the folder images/ in a location of your choice, referred to below as path_to_location/. For instance, you may create a folder /data/image_net and place the images/ folder inside.
+
+**Step 2**: Prepare the data
+```Shellsession
+demo> python preprocess_data_imagenet.py --data_path='path_to_location/'
+```
+The data will then be prepared for training.
+
+
+
+
+## Usage
+
+**Training:**
+For a description of the arguments, including default values, use:
+```Shellsession
+demo> python train.py --help
+```
+
+**Example:**
+```Shellsession
+demo> python train.py --epochs=2 --data-path='path_to_location/' --batch-size=16 --workers=8
+```
+
+
+
+## Appendix: data preparation for the CUB_200_2011 dataset
+
+**Step 1**: Download the dataset from Kaggle
+- Download the dataset at https://www.kaggle.com/datasets/veeralakrishna/200-bird-species-with-11788-images.
+- Extract the downloaded archive. It contains two tar archives, one of which is named "CUB_200_2011.tgz".
+- Place CUB_200_2011.tgz in the folder /energycalculatorsevaluation/data.
+
+**Step 2**: Use the *prepare_data()* function from *preprocess_data.py*
+Arguments:
+- ```data_path```: Path to the downloaded data.
+- ```archive_name```: Name of the downloaded archive.
+- ```seed``` (default: 1234).
+- ```valid_data_amount``` (default: 0.9): Split of the data between training and validation.
+
+For instance, open a terminal, go to the folder resnet_classification_pytorch_vision, open a Python shell, and run the following commands:
+```python
+from preprocess_data import prepare_data
+prepare_data(data_path="../data", archive_name="CUB_200_2011.tgz")
+```
+The data will then be prepared for training.
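+
+For reference, here is a sketch of the same call with the optional arguments written out explicitly; the values shown are simply the defaults listed above, so it is equivalent to the shorter call:
+```python
+from preprocess_data import prepare_data
+
+# Same preparation as above, with the documented defaults made explicit
+prepare_data(data_path="../data", archive_name="CUB_200_2011.tgz",
+             seed=1234, valid_data_amount=0.9)
+```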
\ No newline at end of file diff --git a/exp-3-resnet18/classification_pytorch_vision.py b/exp-3-resnet18/classification_pytorch_vision.py new file mode 100644 index 0000000..2f8ca3c --- /dev/null +++ b/exp-3-resnet18/classification_pytorch_vision.py @@ -0,0 +1,703 @@ +import datetime +import os +import time +import warnings + +import presets +import torch +import torch.utils.data +import torchvision +import transforms +import utils +from sampler import RASampler +from torch import nn +from torch.utils.data.dataloader import default_collate +from torchvision.transforms.functional import InterpolationMode + + +# from codecarbon import EmissionsTracker +# from carbontracker.tracker import CarbonTracker +# from carbontracker import parser as CTparser +# import eco2ai +# from experiment_impact_tracker.compute_tracker import ImpactTracker + + +# --- FOR CALCULATORS +import sys +_path = '.' +sys.path.append(os.path.join(_path)) +from tqdm import tqdm +# from codecarbon import EmissionsTracker, OfflineEmissionsTracker +# from carbontracker.tracker import CarbonTracker +from fct_for_saving import save_cc +from fct_for_saving import save_ct +from fct_for_saving import save_eco2ai +from fct_for_saving import save_FLOPS +from fct_for_saving import save_ga +from fct_for_saving import save_nocalc +from fct_for_saving import save_tapo +from fct_for_tapo import stop_TAPO +import psutil +import GPUtil +from fct_for_ga import stop_UTIL, mean_parallel_UTIL +from fct_for_experiments import ExpParams +from fct_for_experiments import prepare_calculator +from fct_for_experiments import start_calculators +from fct_for_experiments import stop_calculators +from fct_for_experiments import flops_method_pytorch +# --------------------- + +def train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, args, model_ema=None, scaler=None): + model.train() + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter("lr", utils.SmoothedValue(window_size=1, fmt="{value}")) + metric_logger.add_meter("img/s", utils.SmoothedValue(window_size=10, fmt="{value}")) + + header = f"Epoch: [{epoch}]" + for i, (image, target) in enumerate(metric_logger.log_every(data_loader, args.print_freq, header)): + start_time = time.time() + image, target = image.to(device), target.to(device) + with torch.cuda.amp.autocast(enabled=scaler is not None): + output = model(image) + loss = criterion(output, target) + + optimizer.zero_grad() + if scaler is not None: + scaler.scale(loss).backward() + if args.clip_grad_norm is not None: + # we should unscale the gradients of optimizer's assigned params if do gradient clipping + scaler.unscale_(optimizer) + nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm) + scaler.step(optimizer) + scaler.update() + else: + loss.backward() + if args.clip_grad_norm is not None: + nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm) + optimizer.step() + + if model_ema and i % args.model_ema_steps == 0: + model_ema.update_parameters(model) + if epoch < args.lr_warmup_epochs: + # Reset ema buffer to keep copying weights during warmup period + model_ema.n_averaged.fill_(0) + + acc1, acc5 = utils.accuracy(output, target, topk=(1, 5)) + batch_size = image.shape[0] + metric_logger.update(loss=loss.item(), lr=optimizer.param_groups[0]["lr"]) + metric_logger.meters["acc1"].update(acc1.item(), n=batch_size) + metric_logger.meters["acc5"].update(acc5.item(), n=batch_size) + metric_logger.meters["img/s"].update(batch_size / (time.time() - start_time)) + + +def 
evaluate(model, criterion, data_loader, device, print_freq=100, log_suffix=""): + model.eval() + metric_logger = utils.MetricLogger(delimiter=" ") + header = f"Test: {log_suffix}" + + num_processed_samples = 0 + with torch.inference_mode(): + for image, target in metric_logger.log_every(data_loader, print_freq, header): + image = image.to(device, non_blocking=True) + target = target.to(device, non_blocking=True) + output = model(image) + loss = criterion(output, target) + + acc1, acc5 = utils.accuracy(output, target, topk=(1, 5)) + # FIXME need to take into account that the datasets + # could have been padded in distributed setup + batch_size = image.shape[0] + metric_logger.update(loss=loss.item()) + metric_logger.meters["acc1"].update(acc1.item(), n=batch_size) + metric_logger.meters["acc5"].update(acc5.item(), n=batch_size) + num_processed_samples += batch_size + # gather the stats from all processes + + num_processed_samples = utils.reduce_across_processes(num_processed_samples) + if ( + hasattr(data_loader.dataset, "__len__") + and len(data_loader.dataset) != num_processed_samples + and torch.distributed.get_rank() == 0 + ): + # See FIXME above + warnings.warn( + f"It looks like the dataset has {len(data_loader.dataset)} samples, but {num_processed_samples} " + "samples were used for the validation, which might bias the results. " + "Try adjusting the batch size and / or the world size. " + "Setting the world size to 1 is always a safe bet." + ) + + metric_logger.synchronize_between_processes() + + print(f"{header} Acc@1 {metric_logger.acc1.global_avg:.3f} Acc@5 {metric_logger.acc5.global_avg:.3f}") + return metric_logger.acc1.global_avg + + +def _get_cache_path(filepath): + import hashlib + + h = hashlib.sha1(filepath.encode()).hexdigest() + cache_path = os.path.join("~", ".torch", "vision", "datasets", "imagefolder", h[:10] + ".pt") + cache_path = os.path.expanduser(cache_path) + return cache_path + + +def load_data(traindir, valdir, args): + # Data loading code + print("Loading data") + val_resize_size, val_crop_size, train_crop_size = ( + args.val_resize_size, + args.val_crop_size, + args.train_crop_size, + ) + interpolation = InterpolationMode(args.interpolation) + + print("Loading training data") + st = time.time() + cache_path = _get_cache_path(traindir) + if args.cache_dataset and os.path.exists(cache_path): + # Attention, as the transforms are also cached! + print(f"Loading dataset_train from {cache_path}") + dataset, _ = torch.load(cache_path) + else: + # We need a default value for the variables below because args may come + # from train_quantization.py which doesn't define them. 
+ auto_augment_policy = getattr(args, "auto_augment", None) + random_erase_prob = getattr(args, "random_erase", 0.0) + ra_magnitude = getattr(args, "ra_magnitude", None) + augmix_severity = getattr(args, "augmix_severity", None) + dataset = torchvision.datasets.ImageFolder( + traindir, + presets.ClassificationPresetTrain( + crop_size=train_crop_size, + interpolation=interpolation, + auto_augment_policy=auto_augment_policy, + random_erase_prob=random_erase_prob, + ra_magnitude=ra_magnitude, + augmix_severity=augmix_severity, + ), + ) + if args.cache_dataset: + print(f"Saving dataset_train to {cache_path}") + utils.mkdir(os.path.dirname(cache_path)) + utils.save_on_master((dataset, traindir), cache_path) + print("Took", time.time() - st) + + print("Loading validation data") + cache_path = _get_cache_path(valdir) + if args.cache_dataset and os.path.exists(cache_path): + # Attention, as the transforms are also cached! + print(f"Loading dataset_test from {cache_path}") + dataset_test, _ = torch.load(cache_path) + else: + if args.weights and args.test_only: + weights = torchvision.models.get_weight(args.weights) + preprocessing = weights.transforms() + else: + preprocessing = presets.ClassificationPresetEval( + crop_size=val_crop_size, resize_size=val_resize_size, interpolation=interpolation + ) + + dataset_test = torchvision.datasets.ImageFolder( + valdir, + preprocessing, + ) + if args.cache_dataset: + print(f"Saving dataset_test to {cache_path}") + utils.mkdir(os.path.dirname(cache_path)) + utils.save_on_master((dataset_test, valdir), cache_path) + + print("Creating data loaders") + if args.distributed: + if hasattr(args, "ra_sampler") and args.ra_sampler: + train_sampler = RASampler(dataset, shuffle=True, repetitions=args.ra_reps) + else: + train_sampler = torch.utils.data.distributed.DistributedSampler(dataset) + test_sampler = torch.utils.data.distributed.DistributedSampler(dataset_test, shuffle=False) + else: + train_sampler = torch.utils.data.RandomSampler(dataset) + test_sampler = torch.utils.data.SequentialSampler(dataset_test) + + return dataset, dataset_test, train_sampler, test_sampler + + +def main(args): + + print("# ------------------------------ #") + print("# #") + print("# ------------- #") + print("# -- START -- #") + print("# ------------- #") + print("# #") + print("# ------------------------------ #") + + # --- FOR CALCULATORS + exp = ExpParams(args) + # ------------------- + + + if args.output_dir: + utils.mkdir(args.output_dir) + + utils.init_distributed_mode(args) + print(args) + + # device = torch.device(args.device) # from original code + device = exp.device + + + if args.use_deterministic_algorithms: + torch.backends.cudnn.benchmark = False + torch.use_deterministic_algorithms(True) + else: + torch.backends.cudnn.benchmark = True + + train_dir = os.path.join(args.data_path, "train") + val_dir = os.path.join(args.data_path, "test") + dataset, dataset_test, train_sampler, test_sampler = load_data(train_dir, val_dir, args) + + collate_fn = None + num_classes = len(dataset.classes) + mixup_transforms = [] + if args.mixup_alpha > 0.0: + mixup_transforms.append(transforms.RandomMixup(num_classes, p=1.0, alpha=args.mixup_alpha)) + if args.cutmix_alpha > 0.0: + mixup_transforms.append(transforms.RandomCutmix(num_classes, p=1.0, alpha=args.cutmix_alpha)) + if mixup_transforms: + mixupcutmix = torchvision.transforms.RandomChoice(mixup_transforms) + + def collate_fn(batch): + return mixupcutmix(*default_collate(batch)) + + data_loader = torch.utils.data.DataLoader( + dataset, + 
batch_size=args.batch_size, + sampler=train_sampler, + num_workers=args.workers, + pin_memory=True, + collate_fn=collate_fn, + ) + data_loader_test = torch.utils.data.DataLoader( + dataset_test, batch_size=args.batch_size, sampler=test_sampler, num_workers=args.workers, pin_memory=True + ) + + print("Creating model") + model = torchvision.models.get_model(args.model, weights=args.weights, num_classes=num_classes) + model.to(device) + + if args.distributed and args.sync_bn: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) + + criterion = nn.CrossEntropyLoss(label_smoothing=args.label_smoothing) + + custom_keys_weight_decay = [] + if args.bias_weight_decay is not None: + custom_keys_weight_decay.append(("bias", args.bias_weight_decay)) + if args.transformer_embedding_decay is not None: + for key in ["class_token", "position_embedding", "relative_position_bias_table"]: + custom_keys_weight_decay.append((key, args.transformer_embedding_decay)) + parameters = utils.set_weight_decay( + model, + args.weight_decay, + norm_weight_decay=args.norm_weight_decay, + custom_keys_weight_decay=custom_keys_weight_decay if len(custom_keys_weight_decay) > 0 else None, + ) + + opt_name = args.opt.lower() + if opt_name.startswith("sgd"): + optimizer = torch.optim.SGD( + parameters, + lr=args.lr, + momentum=args.momentum, + weight_decay=args.weight_decay, + nesterov="nesterov" in opt_name, + ) + elif opt_name == "rmsprop": + optimizer = torch.optim.RMSprop( + parameters, lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay, eps=0.0316, alpha=0.9 + ) + elif opt_name == "adamw": + optimizer = torch.optim.AdamW(parameters, lr=args.lr, weight_decay=args.weight_decay) + else: + raise RuntimeError(f"Invalid optimizer {args.opt}. Only SGD, RMSprop and AdamW are supported.") + + scaler = torch.cuda.amp.GradScaler() if args.amp else None + + args.lr_scheduler = args.lr_scheduler.lower() + if args.lr_scheduler == "steplr": + main_lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_step_size, gamma=args.lr_gamma) + elif args.lr_scheduler == "cosineannealinglr": + main_lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR( + optimizer, T_max=args.epochs - args.lr_warmup_epochs, eta_min=args.lr_min + ) + elif args.lr_scheduler == "exponentiallr": + main_lr_scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=args.lr_gamma) + else: + raise RuntimeError( + f"Invalid lr scheduler '{args.lr_scheduler}'. Only StepLR, CosineAnnealingLR and ExponentialLR " + "are supported." + ) + + if args.lr_warmup_epochs > 0: + if args.lr_warmup_method == "linear": + warmup_lr_scheduler = torch.optim.lr_scheduler.LinearLR( + optimizer, start_factor=args.lr_warmup_decay, total_iters=args.lr_warmup_epochs + ) + elif args.lr_warmup_method == "constant": + warmup_lr_scheduler = torch.optim.lr_scheduler.ConstantLR( + optimizer, factor=args.lr_warmup_decay, total_iters=args.lr_warmup_epochs + ) + else: + raise RuntimeError( + f"Invalid warmup lr method '{args.lr_warmup_method}'. Only linear and constant are supported." 
+ ) + lr_scheduler = torch.optim.lr_scheduler.SequentialLR( + optimizer, schedulers=[warmup_lr_scheduler, main_lr_scheduler], milestones=[args.lr_warmup_epochs] + ) + else: + lr_scheduler = main_lr_scheduler + + model_without_ddp = model + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + model_ema = None + if args.model_ema: + # Decay adjustment that aims to keep the decay independent of other hyper-parameters originally proposed at: + # https://github.com/facebookresearch/pycls/blob/f8cd9627/pycls/core/net.py#L123 + # + # total_ema_updates = (Dataset_size / n_GPUs) * epochs / (batch_size_per_gpu * EMA_steps) + # We consider constant = Dataset_size for a given dataset/setup and omit it. Thus: + # adjust = 1 / total_ema_updates ~= n_GPUs * batch_size_per_gpu * EMA_steps / epochs + adjust = args.world_size * args.batch_size * args.model_ema_steps / args.epochs + alpha = 1.0 - args.model_ema_decay + alpha = min(1.0, alpha * adjust) + model_ema = utils.ExponentialMovingAverage(model_without_ddp, device=device, decay=1.0 - alpha) + + if args.resume: + checkpoint = torch.load(args.resume, map_location="cpu") + model_without_ddp.load_state_dict(checkpoint["model"]) + if not args.test_only: + optimizer.load_state_dict(checkpoint["optimizer"]) + lr_scheduler.load_state_dict(checkpoint["lr_scheduler"]) + args.start_epoch = checkpoint["epoch"] + 1 + if model_ema: + model_ema.load_state_dict(checkpoint["model_ema"]) + if scaler: + scaler.load_state_dict(checkpoint["scaler"]) + + if args.test_only: + # We disable the cudnn benchmarking because it can noticeably affect the accuracy + torch.backends.cudnn.benchmark = False + torch.backends.cudnn.deterministic = True + if model_ema: + evaluate(model_ema, criterion, data_loader_test, device=device, log_suffix="EMA") + else: + evaluate(model, criterion, data_loader_test, device=device) + return + + + + + # --- FOR CALCULATORS + tracker = prepare_calculator(exp) + if exp.name_calc == 'green_algorithms': + cpu_util = [] + gpu_util = [] + ram_util = [] + start_calculators(exp, tracker) + t0 = time.time() + # ------------------- + + + + + ##################### + ##### Training ##### + ##################### + + if (exp.ml == "training") and exp.name_calc != 'flops': + + print("# ---------------------- #") + print("# --- training start --- #") + print("# ---------------------- #") + + # tracker = get_calculator(args.epochs, args.calculator) + + # print("Start training") + # start_time = time.time() + + # if args.calculator != "carbon_tracker" and args.calculator != None and args.calculator != "impact_tracker": + # tracker.start() + # if args.calculator == "impact_tracker": + # tracker.launch_impact_monitor() + + + for epoch in range(args.start_epoch, args.epochs): + + # --- FOR CALCULATORS + if exp.name_calc == "carbon_tracker": + tracker.epoch_start() + # ------------------- + + # --- TRAIN ONE EPOCH + if args.distributed: + train_sampler.set_epoch(epoch) + train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, args, model_ema, scaler) + lr_scheduler.step() + evaluate(model, criterion, data_loader_test, device=device) + if model_ema: + evaluate(model_ema, criterion, data_loader_test, device=device, log_suffix="EMA") + if args.output_dir: + checkpoint = { + "model": model_without_ddp.state_dict(), + "optimizer": optimizer.state_dict(), + "lr_scheduler": lr_scheduler.state_dict(), + "epoch": epoch, + "args": args, + } + if model_ema: + 
checkpoint["model_ema"] = model_ema.state_dict() + if scaler: + checkpoint["scaler"] = scaler.state_dict() + utils.save_on_master(checkpoint, os.path.join(args.output_dir, f"model_{epoch}.pth")) + utils.save_on_master(checkpoint, os.path.join(args.output_dir, "checkpoint.pth")) + # ------------------ + + # --- FOR CALCULATORS + if exp.name_calc == "carbon_tracker": + tracker.epoch_end() + # ------------------- + + + print("# ---------------------- #") + print("# --- training end --- #") + print("# ---------------------- #") + + + ##################### + ##### Inference ##### + ##################### + + if (exp.ml == "inference") and not exp.name_calc == 'flops': + + print("# ----------------------- #") + print("# --- inference start --- #") + print("# ----------------------- #") + + # PATH = os.path.join(_path, "models", "ResNet_imageNet.pth") + # model.load_state_dict(torch.load(PATH)) + + for kk in tqdm(range(args.nb_batch_inferences)): + + # --- ONE BATCH INFERENCE + image, target = next(iter(data_loader_test)) + image = image.to(device, non_blocking=True) + output = model(image) + batch_size = image.shape[0] + # ----------------------- + + # --- FOR CALCULATORS + if exp.name_calc == 'green_algorithms' and exp.automated and (not exp.parallel): + cpu_util.append(psutil.cpu_percent()) + gpu_util.append(GPUtil.getGPUs()[0].load) + ram_util.append(psutil.virtual_memory()[3]/1000000000) + # ------------------ + + + print("# ----------------------- #") + print("# --- inference end --- #") + print("# ----------------------- #") + + + # --- FOR CALCULATORS + tfinal= time.time() + duration = tfinal - t0 + stop_calculators(exp, tracker) + copy_model = model.to(torch.device("cpu")) + Ec_kWh = flops_method_pytorch(exp, data_loader, copy_model) + + # Saving the data: + if exp.name_calc == 'code_carbon': + save_cc(exp, args, duration) + elif exp.name_calc == 'carbon_tracker': + save_ct(exp, args, duration) + elif exp.name_calc == 'eco2ai': + save_eco2ai(exp, args, duration) + elif exp.name_calc == 'green_algorithms': + if exp.automated and exp.parallel: + stop_UTIL(exp, t0, tfinal) + cpu_util, gpu_util, ram_util = mean_parallel_UTIL(exp) + save_ga(exp, args, duration, exp.automated, cpu_util, gpu_util, ram_util) + elif exp.name_calc == 'tapo': + stop_TAPO(exp, t0, tfinal) + save_tapo(exp, args) + elif exp.name_calc =='flops': + save_FLOPS(exp, args, Ec_kWh) + else: # no calculator + save_nocalc(exp, args, duration) + # ---------------------- + + + + + +def get_args_parser(add_help=True): + import argparse + + parser = argparse.ArgumentParser(description="PyTorch Classification Training", add_help=add_help) + + parser.add_argument("--data-path", default="/datasets01/imagenet_full_size/061417/", type=str, help="dataset path") + parser.add_argument("--model", default="resnet18", type=str, help="model name") + parser.add_argument("--device", default="cuda", type=str, help="device (Use cuda or cpu Default: cuda)") + parser.add_argument( + "-b", "--batch-size", default=32, type=int, help="images per gpu, the total batch size is $NGPU x batch_size" + ) + parser.add_argument("--epochs", default=90, type=int, metavar="N", help="number of total epochs to run") + parser.add_argument( + "-j", "--workers", default=16, type=int, metavar="N", help="number of data loading workers (default: 16)" + ) + parser.add_argument("--opt", default="sgd", type=str, help="optimizer") + parser.add_argument("--lr", default=0.1, type=float, help="initial learning rate") + parser.add_argument("--momentum", default=0.9, type=float, 
metavar="M", help="momentum") + parser.add_argument( + "--wd", + "--weight-decay", + default=1e-4, + type=float, + metavar="W", + help="weight decay (default: 1e-4)", + dest="weight_decay", + ) + parser.add_argument( + "--norm-weight-decay", + default=None, + type=float, + help="weight decay for Normalization layers (default: None, same value as --wd)", + ) + parser.add_argument( + "--bias-weight-decay", + default=None, + type=float, + help="weight decay for bias parameters of all layers (default: None, same value as --wd)", + ) + parser.add_argument( + "--transformer-embedding-decay", + default=None, + type=float, + help="weight decay for embedding parameters for vision transformer models (default: None, same value as --wd)", + ) + parser.add_argument( + "--label-smoothing", default=0.0, type=float, help="label smoothing (default: 0.0)", dest="label_smoothing" + ) + parser.add_argument("--mixup-alpha", default=0.0, type=float, help="mixup alpha (default: 0.0)") + parser.add_argument("--cutmix-alpha", default=0.0, type=float, help="cutmix alpha (default: 0.0)") + parser.add_argument("--lr-scheduler", default="steplr", type=str, help="the lr scheduler (default: steplr)") + parser.add_argument("--lr-warmup-epochs", default=0, type=int, help="the number of epochs to warmup (default: 0)") + parser.add_argument( + "--lr-warmup-method", default="constant", type=str, help="the warmup method (default: constant)" + ) + parser.add_argument("--lr-warmup-decay", default=0.01, type=float, help="the decay for lr") + parser.add_argument("--lr-step-size", default=30, type=int, help="decrease lr every step-size epochs") + parser.add_argument("--lr-gamma", default=0.1, type=float, help="decrease lr by a factor of lr-gamma") + parser.add_argument("--lr-min", default=0.0, type=float, help="minimum lr of lr schedule (default: 0.0)") + parser.add_argument("--print-freq", default=10, type=int, help="print frequency") + parser.add_argument("--output-dir", default=".", type=str, help="path to save outputs") + parser.add_argument("--resume", default="", type=str, help="path of checkpoint") + parser.add_argument("--start-epoch", default=0, type=int, metavar="N", help="start epoch") + parser.add_argument( + "--cache-dataset", + dest="cache_dataset", + help="Cache the datasets for quicker initialization. 
It also serializes the transforms", + action="store_true", + ) + parser.add_argument( + "--sync-bn", + dest="sync_bn", + help="Use sync batch norm", + action="store_true", + ) + parser.add_argument( + "--test-only", + dest="test_only", + help="Only test the model", + action="store_true", + ) + parser.add_argument("--auto-augment", default=None, type=str, help="auto augment policy (default: None)") + parser.add_argument("--ra-magnitude", default=9, type=int, help="magnitude of auto augment policy") + parser.add_argument("--augmix-severity", default=3, type=int, help="severity of augmix policy") + parser.add_argument("--random-erase", default=0.0, type=float, help="random erasing probability (default: 0.0)") + + # Mixed precision training parameters + parser.add_argument("--amp", action="store_true", help="Use torch.cuda.amp for mixed precision training") + + # distributed training parameters + parser.add_argument("--world-size", default=1, type=int, help="number of distributed processes") + parser.add_argument("--dist-url", default="env://", type=str, help="url used to set up distributed training") + parser.add_argument( + "--model-ema", action="store_true", help="enable tracking Exponential Moving Average of model parameters" + ) + parser.add_argument( + "--model-ema-steps", + type=int, + default=32, + help="the number of iterations that controls how often to update the EMA model (default: 32)", + ) + parser.add_argument( + "--model-ema-decay", + type=float, + default=0.99998, + help="decay factor for Exponential Moving Average of model parameters (default: 0.99998)", + ) + parser.add_argument( + "--use-deterministic-algorithms", action="store_true", help="Forces the use of deterministic algorithms only." + ) + parser.add_argument( + "--interpolation", default="bilinear", type=str, help="the interpolation method (default: bilinear)" + ) + parser.add_argument( + "--val-resize-size", default=256, type=int, help="the resize size used for validation (default: 256)" + ) + parser.add_argument( + "--val-crop-size", default=224, type=int, help="the central crop size used for validation (default: 224)" + ) + parser.add_argument( + "--train-crop-size", default=224, type=int, help="the random crop size used for training (default: 224)" + ) + parser.add_argument("--clip-grad-norm", default=None, type=float, help="the maximum gradient norm (default None)") + parser.add_argument("--ra-sampler", action="store_true", help="whether to use Repeated Augmentation in training") + parser.add_argument( + "--ra-reps", default=3, type=int, help="number of repetitions for Repeated Augmentation (default: 3)" + ) + parser.add_argument("--weights", default=None, type=str, help="the weights enum name to load") + + + # parser.add_argument("--calculator", default=None, type=str, help="calculator to meassure energy usage (Use carbon_tracker or code_carbon or eco2ai)") + + + + # --- FOR CALCULATORS + parser.add_argument("--use_accelerator", type=str, help = "TODO", default = 'True') + parser.add_argument("--save_model", type=str, help = "TODO", default = 'False') + parser.add_argument("--ml_phase", type=str, help = "TODO", default = "inference") + parser.add_argument("--calculator", type=str, help = "TODO", default = "no_calculator") + + parser.add_argument("--calculator_mode", type=str, help = "TODO", default = "") + + # should we remove this one? 
: + parser.add_argument("--dev_test", type=str, help = "TODO", default = "False") + + parser.add_argument("--nb_batch_inferences", type=int, help = "TODO", default = 20) + parser.add_argument("--name_exp", type=str, help = "TODO", default = 'CUB_200_2011') + parser.add_argument("--computer", type=str, help = "TODO", default = 'linux_alienware') + parser.add_argument("--path_logs_and_results", type=str, help = "TODO", default = '.') + # --------------------- + + + return parser + + +if __name__ == "__main__": + args = get_args_parser().parse_args() + main(args) diff --git a/exp-3-resnet18/preprocess_data.py b/exp-3-resnet18/preprocess_data.py new file mode 100644 index 0000000..0ebda2f --- /dev/null +++ b/exp-3-resnet18/preprocess_data.py @@ -0,0 +1,183 @@ +import copy +import numpy as np +import os +import random +import shutil + +import torch +import torchvision.datasets as datasets +import torchvision.transforms as transforms +import torch.utils.data as data + + +def prepare_data( + data_path: str, + archive_name: str, + seed: int = 1234, + valid_data_amount: float = 0.9, + ): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.backends.cudnn.deterministic = True + + data_to_extract = os.path.join(data_path, archive_name) + datasets.utils.extract_archive(data_to_extract, data_path) + + TRAIN_RATIO = 0.8 + + data_name = archive_name.split('.')[0] + data_dir = os.path.join(data_path, data_name) + + images_dir = os.path.join(data_dir, 'images') + train_dir = os.path.join(data_dir, 'train') + test_dir = os.path.join(data_dir, 'test') + + if os.path.exists(train_dir): + shutil.rmtree(train_dir) + if os.path.exists(test_dir): + shutil.rmtree(test_dir) + + os.makedirs(train_dir) + os.makedirs(test_dir) + + classes = os.listdir(images_dir) + + for c in classes: + class_dir = os.path.join(images_dir, c) + images = os.listdir(class_dir) + + n_train = int(len(images) * TRAIN_RATIO) + train_images = images[:n_train] + test_images = images[n_train:] + + os.makedirs(os.path.join(train_dir, c), exist_ok = True) + os.makedirs(os.path.join(test_dir, c), exist_ok = True) + + for image in train_images: + image_src = os.path.join(class_dir, image) + image_dst = os.path.join(train_dir, c, image) + shutil.copyfile(image_src, image_dst) + + for image in test_images: + image_src = os.path.join(class_dir, image) + image_dst = os.path.join(test_dir, c, image) + shutil.copyfile(image_src, image_dst) + + train_data = datasets.ImageFolder(root = train_dir, + transform = transforms.ToTensor()) + + means = torch.zeros(3) + stds = torch.zeros(3) + + for img, label in train_data: + means += torch.mean(img, dim = (1,2)) + stds += torch.std(img, dim = (1,2)) + + means /= len(train_data) + stds /= len(train_data) + + print(f'Calculated means: {means}') + print(f'Calculated stds: {stds}') + + pretrained_size = 224 + pretrained_means = [0.485, 0.456, 0.406] + pretrained_stds= [0.229, 0.224, 0.225] + + train_transforms = transforms.Compose([ + transforms.Resize(pretrained_size), + transforms.RandomRotation(5), + transforms.RandomHorizontalFlip(0.5), + transforms.RandomCrop(pretrained_size, padding = 10), + transforms.ToTensor(), + transforms.Normalize(mean = pretrained_means, + std = pretrained_stds) + ]) + + test_transforms = transforms.Compose([ + transforms.Resize(pretrained_size), + transforms.CenterCrop(pretrained_size), + transforms.ToTensor(), + transforms.Normalize(mean = pretrained_means, + std = pretrained_stds) + ]) + + train_data = datasets.ImageFolder(root = 
train_dir, + transform = train_transforms) + + test_data = datasets.ImageFolder(root = test_dir, + transform = test_transforms) + + VALID_RATIO = valid_data_amount + + n_train_examples = int(len(train_data) * VALID_RATIO) + n_valid_examples = len(train_data) - n_train_examples + + train_data, valid_data = data.random_split(train_data, + [n_train_examples, n_valid_examples]) + + valid_data = copy.deepcopy(valid_data) + valid_data.dataset.transform = test_transforms + + print(f'Number of training examples: {len(train_data)}') + print(f'Number of validation examples: {len(valid_data)}') + print(f'Number of testing examples: {len(test_data)}') + + return train_data, valid_data, test_data + + + +def prepare_image_net_data( + data_path: str = 'data', + folder_name: str = 'image_net', + seed: int = 1234, + ): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.backends.cudnn.deterministic = True + + TRAIN_RATIO = 0.8 + + data_dir = os.path.join(data_path, folder_name) + + images_dir = os.path.join(data_dir, 'images') + train_dir = os.path.join(data_dir, 'train') + test_dir = os.path.join(data_dir, 'test') + + if os.path.exists(train_dir): + shutil.rmtree(train_dir) + if os.path.exists(test_dir): + shutil.rmtree(test_dir) + + os.makedirs(train_dir) + os.makedirs(test_dir) + + classes = os.listdir(images_dir) + + print(classes) + + for c in classes: + class_dir = os.path.join(images_dir, c) + images = os.listdir(class_dir) + + # maybe shuffle images + + n_train = int(len(images) * TRAIN_RATIO) + train_images = images[:n_train] + test_images = images[n_train:] + + os.makedirs(os.path.join(train_dir, c), exist_ok = True) + os.makedirs(os.path.join(test_dir, c), exist_ok = True) + + for image in train_images: + image_src = os.path.join(class_dir, image) + image_dst = os.path.join(train_dir, c, image) + shutil.copyfile(image_src, image_dst) + + for image in test_images: + image_src = os.path.join(class_dir, image) + image_dst = os.path.join(test_dir, c, image) + shutil.copyfile(image_src, image_dst) diff --git a/exp-3-resnet18/preprocess_data_imagenet.py b/exp-3-resnet18/preprocess_data_imagenet.py new file mode 100644 index 0000000..3e6f129 --- /dev/null +++ b/exp-3-resnet18/preprocess_data_imagenet.py @@ -0,0 +1,66 @@ +import numpy as np +import os +import random +import shutil +import torch +from argparse import ArgumentParser + +def prepare_image_net_data( + data_path: str = os.path.join('data','image_net'), + seed: int = 1234, + ): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.backends.cudnn.deterministic = True + + TRAIN_RATIO = 0.8 + + data_dir = data_path + + images_dir = os.path.join(data_dir, 'images') + train_dir = os.path.join(data_dir, 'train') + test_dir = os.path.join(data_dir, 'test') + + if os.path.exists(train_dir): + shutil.rmtree(train_dir) + if os.path.exists(test_dir): + shutil.rmtree(test_dir) + + os.makedirs(train_dir) + os.makedirs(test_dir) + + classes = os.listdir(images_dir) + + print(classes) + + for c in classes: + class_dir = os.path.join(images_dir, c) + images = os.listdir(class_dir) + + # maybe shuffle images + + n_train = int(len(images) * TRAIN_RATIO) + train_images = images[:n_train] + test_images = images[n_train:] + + os.makedirs(os.path.join(train_dir, c), exist_ok = True) + os.makedirs(os.path.join(test_dir, c), exist_ok = True) + + for image in train_images: + image_src = os.path.join(class_dir, image) + image_dst = os.path.join(train_dir, c, 
image) + shutil.copyfile(image_src, image_dst) + + for image in test_images: + image_src = os.path.join(class_dir, image) + image_dst = os.path.join(test_dir, c, image) + shutil.copyfile(image_src, image_dst) + + +if __name__=='__main__': + parser = ArgumentParser() + parser.add_argument("--data_path", type=str, help = "path to the folder images/", default = os.path.join('data','image_net')) + args_parser = parser.parse_args() + prepare_image_net_data(data_path=args_parser.data_path) \ No newline at end of file diff --git a/exp-3-resnet18/presets.py b/exp-3-resnet18/presets.py new file mode 100644 index 0000000..602a2f4 --- /dev/null +++ b/exp-3-resnet18/presets.py @@ -0,0 +1,71 @@ +import torch +from torchvision.transforms import autoaugment, transforms +from torchvision.transforms.functional import InterpolationMode + + +class ClassificationPresetTrain: + def __init__( + self, + *, + crop_size, + mean=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + interpolation=InterpolationMode.BILINEAR, + hflip_prob=0.5, + auto_augment_policy=None, + ra_magnitude=9, + augmix_severity=3, + random_erase_prob=0.0, + ): + trans = [transforms.RandomResizedCrop(crop_size, interpolation=interpolation)] + if hflip_prob > 0: + trans.append(transforms.RandomHorizontalFlip(hflip_prob)) + if auto_augment_policy is not None: + if auto_augment_policy == "ra": + trans.append(autoaugment.RandAugment(interpolation=interpolation, magnitude=ra_magnitude)) + elif auto_augment_policy == "ta_wide": + trans.append(autoaugment.TrivialAugmentWide(interpolation=interpolation)) + elif auto_augment_policy == "augmix": + trans.append(autoaugment.AugMix(interpolation=interpolation, severity=augmix_severity)) + else: + aa_policy = autoaugment.AutoAugmentPolicy(auto_augment_policy) + trans.append(autoaugment.AutoAugment(policy=aa_policy, interpolation=interpolation)) + trans.extend( + [ + transforms.PILToTensor(), + transforms.ConvertImageDtype(torch.float), + transforms.Normalize(mean=mean, std=std), + ] + ) + if random_erase_prob > 0: + trans.append(transforms.RandomErasing(p=random_erase_prob)) + + self.transforms = transforms.Compose(trans) + + def __call__(self, img): + return self.transforms(img) + + +class ClassificationPresetEval: + def __init__( + self, + *, + crop_size, + resize_size=256, + mean=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + interpolation=InterpolationMode.BILINEAR, + ): + + self.transforms = transforms.Compose( + [ + transforms.Resize(resize_size, interpolation=interpolation), + transforms.CenterCrop(crop_size), + transforms.PILToTensor(), + transforms.ConvertImageDtype(torch.float), + transforms.Normalize(mean=mean, std=std), + ] + ) + + def __call__(self, img): + return self.transforms(img) \ No newline at end of file diff --git a/exp-3-resnet18/sampler.py b/exp-3-resnet18/sampler.py new file mode 100644 index 0000000..07c3ead --- /dev/null +++ b/exp-3-resnet18/sampler.py @@ -0,0 +1,61 @@ +import math + +import torch +import torch.distributed as dist + + +class RASampler(torch.utils.data.Sampler): + """Sampler that restricts data loading to a subset of the dataset for distributed, + with repeated augmentation. + It ensures that different each augmented version of a sample will be visible to a + different process (GPU). + Heavily based on 'torch.utils.data.DistributedSampler'. 
+ This is borrowed from the DeiT Repo: + https://github.com/facebookresearch/deit/blob/main/samplers.py + """ + + def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, seed=0, repetitions=3): + if num_replicas is None: + if not dist.is_available(): + raise RuntimeError("Requires distributed package to be available!") + num_replicas = dist.get_world_size() + if rank is None: + if not dist.is_available(): + raise RuntimeError("Requires distributed package to be available!") + rank = dist.get_rank() + self.dataset = dataset + self.num_replicas = num_replicas + self.rank = rank + self.epoch = 0 + self.num_samples = int(math.ceil(len(self.dataset) * float(repetitions) / self.num_replicas)) + self.total_size = self.num_samples * self.num_replicas + self.num_selected_samples = int(math.floor(len(self.dataset) // 256 * 256 / self.num_replicas)) + self.shuffle = shuffle + self.seed = seed + self.repetitions = repetitions + + def __iter__(self): + if self.shuffle: + # Deterministically shuffle based on epoch + g = torch.Generator() + g.manual_seed(self.seed + self.epoch) + indices = torch.randperm(len(self.dataset), generator=g).tolist() + else: + indices = list(range(len(self.dataset))) + + # Add extra samples to make it evenly divisible + indices = [ele for ele in indices for i in range(self.repetitions)] + indices += indices[: (self.total_size - len(indices))] + assert len(indices) == self.total_size + + # Subsample + indices = indices[self.rank : self.total_size : self.num_replicas] + assert len(indices) == self.num_samples + + return iter(indices[: self.num_selected_samples]) + + def __len__(self): + return self.num_selected_samples + + def set_epoch(self, epoch): + self.epoch = epoch \ No newline at end of file diff --git a/exp-3-resnet18/transforms.py b/exp-3-resnet18/transforms.py new file mode 100644 index 0000000..b7b129a --- /dev/null +++ b/exp-3-resnet18/transforms.py @@ -0,0 +1,179 @@ +import math +from typing import Tuple + +import torch +from torch import Tensor +from torchvision.transforms import functional as F + + +class RandomMixup(torch.nn.Module): + """Randomly apply Mixup to the provided batch and targets. + The class implements the data augmentations as described in the paper + `"mixup: Beyond Empirical Risk Minimization" `_. + Args: + num_classes (int): number of classes used for one-hot encoding. + p (float): probability of the batch being transformed. Default value is 0.5. + alpha (float): hyperparameter of the Beta distribution used for mixup. + Default value is 1.0. + inplace (bool): boolean to make this transform inplace. Default set to False. + """ + + def __init__(self, num_classes: int, p: float = 0.5, alpha: float = 1.0, inplace: bool = False) -> None: + super().__init__() + + if num_classes < 1: + raise ValueError( + f"Please provide a valid positive value for the num_classes. Got num_classes={num_classes}" + ) + + if alpha <= 0: + raise ValueError("Alpha param can't be zero.") + + self.num_classes = num_classes + self.p = p + self.alpha = alpha + self.inplace = inplace + + def forward(self, batch: Tensor, target: Tensor) -> Tuple[Tensor, Tensor]: + """ + Args: + batch (Tensor): Float tensor of size (B, C, H, W) + target (Tensor): Integer tensor of size (B, ) + Returns: + Tensor: Randomly transformed batch. + """ + if batch.ndim != 4: + raise ValueError(f"Batch ndim should be 4. Got {batch.ndim}") + if target.ndim != 1: + raise ValueError(f"Target ndim should be 1. 
Got {target.ndim}") + if not batch.is_floating_point(): + raise TypeError(f"Batch dtype should be a float tensor. Got {batch.dtype}.") + if target.dtype != torch.int64: + raise TypeError(f"Target dtype should be torch.int64. Got {target.dtype}") + + if not self.inplace: + batch = batch.clone() + target = target.clone() + + if target.ndim == 1: + target = torch.nn.functional.one_hot(target, num_classes=self.num_classes).to(dtype=batch.dtype) + + if torch.rand(1).item() >= self.p: + return batch, target + + # It's faster to roll the batch by one instead of shuffling it to create image pairs + batch_rolled = batch.roll(1, 0) + target_rolled = target.roll(1, 0) + + # Implemented as on mixup paper, page 3. + lambda_param = float(torch._sample_dirichlet(torch.tensor([self.alpha, self.alpha]))[0]) + batch_rolled.mul_(1.0 - lambda_param) + batch.mul_(lambda_param).add_(batch_rolled) + + target_rolled.mul_(1.0 - lambda_param) + target.mul_(lambda_param).add_(target_rolled) + + return batch, target + + def __repr__(self) -> str: + s = ( + f"{self.__class__.__name__}(" + f"num_classes={self.num_classes}" + f", p={self.p}" + f", alpha={self.alpha}" + f", inplace={self.inplace}" + f")" + ) + return s + + +class RandomCutmix(torch.nn.Module): + """Randomly apply Cutmix to the provided batch and targets. + The class implements the data augmentations as described in the paper + `"CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features" + `_. + Args: + num_classes (int): number of classes used for one-hot encoding. + p (float): probability of the batch being transformed. Default value is 0.5. + alpha (float): hyperparameter of the Beta distribution used for cutmix. + Default value is 1.0. + inplace (bool): boolean to make this transform inplace. Default set to False. + """ + + def __init__(self, num_classes: int, p: float = 0.5, alpha: float = 1.0, inplace: bool = False) -> None: + super().__init__() + if num_classes < 1: + raise ValueError("Please provide a valid positive value for the num_classes.") + if alpha <= 0: + raise ValueError("Alpha param can't be zero.") + + self.num_classes = num_classes + self.p = p + self.alpha = alpha + self.inplace = inplace + + def forward(self, batch: Tensor, target: Tensor) -> Tuple[Tensor, Tensor]: + """ + Args: + batch (Tensor): Float tensor of size (B, C, H, W) + target (Tensor): Integer tensor of size (B, ) + Returns: + Tensor: Randomly transformed batch. + """ + if batch.ndim != 4: + raise ValueError(f"Batch ndim should be 4. Got {batch.ndim}") + if target.ndim != 1: + raise ValueError(f"Target ndim should be 1. Got {target.ndim}") + if not batch.is_floating_point(): + raise TypeError(f"Batch dtype should be a float tensor. Got {batch.dtype}.") + if target.dtype != torch.int64: + raise TypeError(f"Target dtype should be torch.int64. Got {target.dtype}") + + if not self.inplace: + batch = batch.clone() + target = target.clone() + + if target.ndim == 1: + target = torch.nn.functional.one_hot(target, num_classes=self.num_classes).to(dtype=batch.dtype) + + if torch.rand(1).item() >= self.p: + return batch, target + + # It's faster to roll the batch by one instead of shuffling it to create image pairs + batch_rolled = batch.roll(1, 0) + target_rolled = target.roll(1, 0) + + # Implemented as on cutmix paper, page 12 (with minor corrections on typos). 
+ lambda_param = float(torch._sample_dirichlet(torch.tensor([self.alpha, self.alpha]))[0]) + _, H, W = F.get_dimensions(batch) + + r_x = torch.randint(W, (1,)) + r_y = torch.randint(H, (1,)) + + r = 0.5 * math.sqrt(1.0 - lambda_param) + r_w_half = int(r * W) + r_h_half = int(r * H) + + x1 = int(torch.clamp(r_x - r_w_half, min=0)) + y1 = int(torch.clamp(r_y - r_h_half, min=0)) + x2 = int(torch.clamp(r_x + r_w_half, max=W)) + y2 = int(torch.clamp(r_y + r_h_half, max=H)) + + batch[:, :, y1:y2, x1:x2] = batch_rolled[:, :, y1:y2, x1:x2] + lambda_param = float(1.0 - (x2 - x1) * (y2 - y1) / (W * H)) + + target_rolled.mul_(1.0 - lambda_param) + target.mul_(lambda_param).add_(target_rolled) + + return batch, target + + def __repr__(self) -> str: + s = ( + f"{self.__class__.__name__}(" + f"num_classes={self.num_classes}" + f", p={self.p}" + f", alpha={self.alpha}" + f", inplace={self.inplace}" + f")" + ) + return s \ No newline at end of file diff --git a/exp-3-resnet18/utils.py b/exp-3-resnet18/utils.py new file mode 100644 index 0000000..76ff131 --- /dev/null +++ b/exp-3-resnet18/utils.py @@ -0,0 +1,457 @@ +import copy +import datetime +import errno +import hashlib +import os +import time +from collections import defaultdict, deque, OrderedDict +from typing import List, Optional, Tuple + +import torch +import torch.distributed as dist + + +class SmoothedValue: + """Track a series of values and provide access to smoothed values over a + window or the global series average. + """ + + def __init__(self, window_size=20, fmt=None): + if fmt is None: + fmt = "{median:.4f} ({global_avg:.4f})" + self.deque = deque(maxlen=window_size) + self.total = 0.0 + self.count = 0 + self.fmt = fmt + + def update(self, value, n=1): + self.deque.append(value) + self.count += n + self.total += value * n + + def synchronize_between_processes(self): + """ + Warning: does not synchronize the deque! 
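+        (Added note:) only ``count`` and ``total`` are reduced across processes, so
+        ``global_avg`` becomes a global statistic; ``median``, ``avg``, ``max`` and
+        ``value`` remain local window statistics for the current process.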
+ """ + t = reduce_across_processes([self.count, self.total]) + t = t.tolist() + self.count = int(t[0]) + self.total = t[1] + + @property + def median(self): + d = torch.tensor(list(self.deque)) + return d.median().item() + + @property + def avg(self): + d = torch.tensor(list(self.deque), dtype=torch.float32) + return d.mean().item() + + @property + def global_avg(self): + return self.total / self.count + + @property + def max(self): + return max(self.deque) + + @property + def value(self): + return self.deque[-1] + + def __str__(self): + return self.fmt.format( + median=self.median, avg=self.avg, global_avg=self.global_avg, max=self.max, value=self.value + ) + + +class MetricLogger: + def __init__(self, delimiter="\t"): + self.meters = defaultdict(SmoothedValue) + self.delimiter = delimiter + + def update(self, **kwargs): + for k, v in kwargs.items(): + if isinstance(v, torch.Tensor): + v = v.item() + assert isinstance(v, (float, int)) + self.meters[k].update(v) + + def __getattr__(self, attr): + if attr in self.meters: + return self.meters[attr] + if attr in self.__dict__: + return self.__dict__[attr] + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{attr}'") + + def __str__(self): + loss_str = [] + for name, meter in self.meters.items(): + loss_str.append(f"{name}: {str(meter)}") + return self.delimiter.join(loss_str) + + def synchronize_between_processes(self): + for meter in self.meters.values(): + meter.synchronize_between_processes() + + def add_meter(self, name, meter): + self.meters[name] = meter + + def log_every(self, iterable, print_freq, header=None): + i = 0 + if not header: + header = "" + start_time = time.time() + end = time.time() + iter_time = SmoothedValue(fmt="{avg:.4f}") + data_time = SmoothedValue(fmt="{avg:.4f}") + space_fmt = ":" + str(len(str(len(iterable)))) + "d" + if torch.cuda.is_available(): + log_msg = self.delimiter.join( + [ + header, + "[{0" + space_fmt + "}/{1}]", + "eta: {eta}", + "{meters}", + "time: {time}", + "data: {data}", + "max mem: {memory:.0f}", + ] + ) + else: + log_msg = self.delimiter.join( + [header, "[{0" + space_fmt + "}/{1}]", "eta: {eta}", "{meters}", "time: {time}", "data: {data}"] + ) + MB = 1024.0 * 1024.0 + for obj in iterable: + data_time.update(time.time() - end) + yield obj + iter_time.update(time.time() - end) + if i % print_freq == 0: + eta_seconds = iter_time.global_avg * (len(iterable) - i) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + if torch.cuda.is_available(): + print( + log_msg.format( + i, + len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB, + ) + ) + else: + print( + log_msg.format( + i, len(iterable), eta=eta_string, meters=str(self), time=str(iter_time), data=str(data_time) + ) + ) + i += 1 + end = time.time() + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print(f"{header} Total time: {total_time_str}") + + +class ExponentialMovingAverage(torch.optim.swa_utils.AveragedModel): + """Maintains moving averages of model parameters using an exponential decay. + ``ema_avg = decay * avg_model_param + (1 - decay) * model_param`` + `torch.optim.swa_utils.AveragedModel `_ + is used to compute the EMA. 
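+    (Added note:) in this repository the training script rescales ``--model-ema-decay``
+    using world size, batch size, ``--model-ema-steps`` and the number of epochs before
+    constructing this class, so the ``decay`` passed in is already the per-update value.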
+ """ + + def __init__(self, model, decay, device="cpu"): + def ema_avg(avg_model_param, model_param, num_averaged): + return decay * avg_model_param + (1 - decay) * model_param + + super().__init__(model, device, ema_avg, use_buffers=True) + + +def accuracy(output, target, topk=(1,)): + """Computes the accuracy over the k top predictions for the specified values of k""" + with torch.inference_mode(): + maxk = max(topk) + batch_size = target.size(0) + if target.ndim == 2: + target = target.max(dim=1)[1] + + _, pred = output.topk(maxk, 1, True, True) + pred = pred.t() + correct = pred.eq(target[None]) + + res = [] + for k in topk: + correct_k = correct[:k].flatten().sum(dtype=torch.float32) + res.append(correct_k * (100.0 / batch_size)) + return res + + +def mkdir(path): + try: + os.makedirs(path) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + +def setup_for_distributed(is_master): + """ + This function disables printing when not in master process + """ + import builtins as __builtin__ + + builtin_print = __builtin__.print + + def print(*args, **kwargs): + force = kwargs.pop("force", False) + if is_master or force: + builtin_print(*args, **kwargs) + + __builtin__.print = print + + +def is_dist_avail_and_initialized(): + if not dist.is_available(): + return False + if not dist.is_initialized(): + return False + return True + + +def get_world_size(): + if not is_dist_avail_and_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank(): + if not is_dist_avail_and_initialized(): + return 0 + return dist.get_rank() + + +def is_main_process(): + return get_rank() == 0 + + +def save_on_master(*args, **kwargs): + if is_main_process(): + torch.save(*args, **kwargs) + + +def init_distributed_mode(args): + if "RANK" in os.environ and "WORLD_SIZE" in os.environ: + args.rank = int(os.environ["RANK"]) + args.world_size = int(os.environ["WORLD_SIZE"]) + args.gpu = int(os.environ["LOCAL_RANK"]) + elif "SLURM_PROCID" in os.environ: + args.rank = int(os.environ["SLURM_PROCID"]) + args.gpu = args.rank % torch.cuda.device_count() + elif hasattr(args, "rank"): + pass + else: + print("Not using distributed mode") + args.distributed = False + return + + args.distributed = True + + torch.cuda.set_device(args.gpu) + args.dist_backend = "nccl" + print(f"| distributed init (rank {args.rank}): {args.dist_url}", flush=True) + torch.distributed.init_process_group( + backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank + ) + torch.distributed.barrier() + setup_for_distributed(args.rank == 0) + + +def average_checkpoints(inputs): + """Loads checkpoints from inputs and returns a model with averaged weights. Original implementation taken from: + https://github.com/pytorch/fairseq/blob/a48f235636557b8d3bc4922a6fa90f3a0fa57955/scripts/average_checkpoints.py#L16 + Args: + inputs (List[str]): An iterable of string paths of checkpoints to load from. + Returns: + A dict of string keys mapping to various values. The 'model' key + from the returned dict should correspond to an OrderedDict mapping + string parameter names to torch Tensors. 
+ """ + params_dict = OrderedDict() + params_keys = None + new_state = None + num_models = len(inputs) + for fpath in inputs: + with open(fpath, "rb") as f: + state = torch.load( + f, + map_location=(lambda s, _: torch.serialization.default_restore_location(s, "cpu")), + ) + # Copies over the settings from the first checkpoint + if new_state is None: + new_state = state + model_params = state["model"] + model_params_keys = list(model_params.keys()) + if params_keys is None: + params_keys = model_params_keys + elif params_keys != model_params_keys: + raise KeyError( + f"For checkpoint {f}, expected list of params: {params_keys}, but found: {model_params_keys}" + ) + for k in params_keys: + p = model_params[k] + if isinstance(p, torch.HalfTensor): + p = p.float() + if k not in params_dict: + params_dict[k] = p.clone() + # NOTE: clone() is needed in case of p is a shared parameter + else: + params_dict[k] += p + averaged_params = OrderedDict() + for k, v in params_dict.items(): + averaged_params[k] = v + if averaged_params[k].is_floating_point(): + averaged_params[k].div_(num_models) + else: + averaged_params[k] //= num_models + new_state["model"] = averaged_params + return new_state + + +def store_model_weights(model, checkpoint_path, checkpoint_key="model", strict=True): + """ + This method can be used to prepare weights files for new models. It receives as + input a model architecture and a checkpoint from the training script and produces + a file with the weights ready for release. + Examples: + from torchvision import models as M + # Classification + model = M.mobilenet_v3_large(weights=None) + print(store_model_weights(model, './class.pth')) + # Quantized Classification + model = M.quantization.mobilenet_v3_large(weights=None, quantize=False) + model.fuse_model(is_qat=True) + model.qconfig = torch.ao.quantization.get_default_qat_qconfig('qnnpack') + _ = torch.ao.quantization.prepare_qat(model, inplace=True) + print(store_model_weights(model, './qat.pth')) + # Object Detection + model = M.detection.fasterrcnn_mobilenet_v3_large_fpn(weights=None, weights_backbone=None) + print(store_model_weights(model, './obj.pth')) + # Segmentation + model = M.segmentation.deeplabv3_mobilenet_v3_large(weights=None, weights_backbone=None, aux_loss=True) + print(store_model_weights(model, './segm.pth', strict=False)) + Args: + model (pytorch.nn.Module): The model on which the weights will be loaded for validation purposes. + checkpoint_path (str): The path of the checkpoint we will load. + checkpoint_key (str, optional): The key of the checkpoint where the model weights are stored. + Default: "model". + strict (bool): whether to strictly enforce that the keys + in :attr:`state_dict` match the keys returned by this module's + :meth:`~torch.nn.Module.state_dict` function. Default: ``True`` + Returns: + output_path (str): The location where the weights are saved. + """ + # Store the new model next to the checkpoint_path + checkpoint_path = os.path.abspath(checkpoint_path) + output_dir = os.path.dirname(checkpoint_path) + + # Deep copy to avoid side effects on the model object. + model = copy.deepcopy(model) + checkpoint = torch.load(checkpoint_path, map_location="cpu") + + # Load the weights to the model to validate that everything works + # and remove unnecessary weights (such as auxiliaries, etc.) 
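+    # (Added note:) for "model_ema" checkpoints the state dict comes from
+    # torch.optim.swa_utils.AveragedModel, which keeps the averaged network under a
+    # "module." attribute and tracks an extra "n_averaged" buffer; the buffer is deleted
+    # and the prefix stripped below so the weights load into a plain model.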
+ if checkpoint_key == "model_ema": + del checkpoint[checkpoint_key]["n_averaged"] + torch.nn.modules.utils.consume_prefix_in_state_dict_if_present(checkpoint[checkpoint_key], "module.") + model.load_state_dict(checkpoint[checkpoint_key], strict=strict) + + tmp_path = os.path.join(output_dir, str(model.__hash__())) + torch.save(model.state_dict(), tmp_path) + + sha256_hash = hashlib.sha256() + with open(tmp_path, "rb") as f: + # Read and update hash string value in blocks of 4K + for byte_block in iter(lambda: f.read(4096), b""): + sha256_hash.update(byte_block) + hh = sha256_hash.hexdigest() + + output_path = os.path.join(output_dir, "weights-" + str(hh[:8]) + ".pth") + os.replace(tmp_path, output_path) + + return output_path + + +def reduce_across_processes(val): + if not is_dist_avail_and_initialized(): + # nothing to sync, but we still convert to tensor for consistency with the distributed case. + return torch.tensor(val) + + t = torch.tensor(val, device="cuda") + dist.barrier() + dist.all_reduce(t) + return t + + +def set_weight_decay( + model: torch.nn.Module, + weight_decay: float, + norm_weight_decay: Optional[float] = None, + norm_classes: Optional[List[type]] = None, + custom_keys_weight_decay: Optional[List[Tuple[str, float]]] = None, +): + if not norm_classes: + norm_classes = [ + torch.nn.modules.batchnorm._BatchNorm, + torch.nn.LayerNorm, + torch.nn.GroupNorm, + torch.nn.modules.instancenorm._InstanceNorm, + torch.nn.LocalResponseNorm, + ] + norm_classes = tuple(norm_classes) + + params = { + "other": [], + "norm": [], + } + params_weight_decay = { + "other": weight_decay, + "norm": norm_weight_decay, + } + custom_keys = [] + if custom_keys_weight_decay is not None: + for key, weight_decay in custom_keys_weight_decay: + params[key] = [] + params_weight_decay[key] = weight_decay + custom_keys.append(key) + + def _add_params(module, prefix=""): + for name, p in module.named_parameters(recurse=False): + if not p.requires_grad: + continue + is_custom_key = False + for key in custom_keys: + target_name = f"{prefix}.{name}" if prefix != "" and "." in key else name + if key == target_name: + params[key].append(p) + is_custom_key = True + break + if not is_custom_key: + if norm_weight_decay is not None and isinstance(module, norm_classes): + params["norm"].append(p) + else: + params["other"].append(p) + + for child_name, child_module in module.named_children(): + child_prefix = f"{prefix}.{child_name}" if prefix != "" else child_name + _add_params(child_module, prefix=child_prefix) + + _add_params(model) + + param_groups = [] + for key in params: + if len(params[key]) > 0: + param_groups.append({"params": params[key], "weight_decay": params_weight_decay[key]}) + return param_groups \ No newline at end of file diff --git a/exp-4-bert-squad/README-Bert-SQUAD.md b/exp-4-bert-squad/README-Bert-SQUAD.md new file mode 100644 index 0000000..7a04632 --- /dev/null +++ b/exp-4-bert-squad/README-Bert-SQUAD.md @@ -0,0 +1,53 @@ +# SQuAD 1.1 energy consumption + +**Programming language:** +Developed and tested using python 3.7.16 + + +## About the BERT fine-tuning code + +The fine-tuning scripts are the recipe for fine-tuning Bert developed by google-research, it can be found on github at ``google-research/bert/``. We have modified the original scripts as described in the file ``documentation_tf_update.md`` located in this folder. 
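+
+For reference, a typical SQuAD 1.1 fine-tuning run with the (modified) ``run_squad.py`` recipe looks roughly like the following. The paths are placeholders for wherever the BERT-Base checkpoint and the SQuAD files were downloaded, and the flag names and values follow the upstream google-research/bert README:
+
+```Shellsession
+demo> python run_squad.py \
+  --vocab_file=$BERT_BASE_DIR/vocab.txt \
+  --bert_config_file=$BERT_BASE_DIR/bert_config.json \
+  --init_checkpoint=$BERT_BASE_DIR/bert_model.ckpt \
+  --do_train=True \
+  --train_file=train-v1.1.json \
+  --do_predict=True \
+  --predict_file=dev-v1.1.json \
+  --train_batch_size=12 \
+  --learning_rate=3e-5 \
+  --num_train_epochs=2.0 \
+  --max_seq_length=384 \
+  --doc_stride=128 \
+  --output_dir=/tmp/squad_base/
+```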
+ + + +## Download the BERT model +https://github.com/google-research/bert +*Model specification:* **BERT-Base, Uncased** + +## Download the SQUADv1.1 dataset +https://github.com/google-research/bert +- train-v1.1.json +- dev-v1.1.json +- evaluate-v1.1.py + + +## Detail on the creation of the virtual environment and tensorflow installation + +Below is the detail on how we have created the conda environment and installed tensorflow at the time. + +Source: https://www.tensorflow.org/install/pip + +```Shellsession +demo> conda create -n cenv_tf python=3.7 + +demo> conda activate cenv_tf + +demo> conda install -c conda-forge cudatoolkit=11.8.0 + +demo> pip install nvidia-cudnn-cu11 + +demo> pip install tensorflow + +demo> mkdir -p $CONDA_PREFIX/etc/conda/activate.d + +demo> echo 'CUDNN_PATH=$(dirname $(python -c "import nvidia.cudnn;print(nvidia.cudnn.__file__)"))' >> $CONDA_PREFIX/etc/conda/activate.d/env_vars.sh + +demo> echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CONDA_PREFIX/lib/:$CUDNN_PATH/lib' >> $CONDA_PREFIX/etc/conda/activate.d/env_vars.sh + +demo> source $CONDA_PREFIX/etc/conda/activate.d/env_vars.sh +``` + +Verify install: +```Shellsession +demo> python3 -c "import tensorflow as tf; print(tf.config.list_physical_devices('GPU'))" +``` \ No newline at end of file diff --git a/exp-4-bert-squad/__init__.py b/exp-4-bert-squad/__init__.py new file mode 100644 index 0000000..effb57b --- /dev/null +++ b/exp-4-bert-squad/__init__.py @@ -0,0 +1,15 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/exp-4-bert-squad/create_pretraining_data.py b/exp-4-bert-squad/create_pretraining_data.py new file mode 100644 index 0000000..aca98ff --- /dev/null +++ b/exp-4-bert-squad/create_pretraining_data.py @@ -0,0 +1,469 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Create masked LM/next sentence masked_lm TF examples for BERT.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import random +import tokenization +import tensorflow as tf + +flags = tf.flags + +FLAGS = flags.FLAGS + +flags.DEFINE_string("input_file", None, + "Input raw text file (or comma-separated list of files).") + +flags.DEFINE_string( + "output_file", None, + "Output TF example file (or comma-separated list of files).") + +flags.DEFINE_string("vocab_file", None, + "The vocabulary file that the BERT model was trained on.") + +flags.DEFINE_bool( + "do_lower_case", True, + "Whether to lower case the input text. Should be True for uncased " + "models and False for cased models.") + +flags.DEFINE_bool( + "do_whole_word_mask", False, + "Whether to use whole word masking rather than per-WordPiece masking.") + +flags.DEFINE_integer("max_seq_length", 128, "Maximum sequence length.") + +flags.DEFINE_integer("max_predictions_per_seq", 20, + "Maximum number of masked LM predictions per sequence.") + +flags.DEFINE_integer("random_seed", 12345, "Random seed for data generation.") + +flags.DEFINE_integer( + "dupe_factor", 10, + "Number of times to duplicate the input data (with different masks).") + +flags.DEFINE_float("masked_lm_prob", 0.15, "Masked LM probability.") + +flags.DEFINE_float( + "short_seq_prob", 0.1, + "Probability of creating sequences which are shorter than the " + "maximum length.") + + +class TrainingInstance(object): + """A single training instance (sentence pair).""" + + def __init__(self, tokens, segment_ids, masked_lm_positions, masked_lm_labels, + is_random_next): + self.tokens = tokens + self.segment_ids = segment_ids + self.is_random_next = is_random_next + self.masked_lm_positions = masked_lm_positions + self.masked_lm_labels = masked_lm_labels + + def __str__(self): + s = "" + s += "tokens: %s\n" % (" ".join( + [tokenization.printable_text(x) for x in self.tokens])) + s += "segment_ids: %s\n" % (" ".join([str(x) for x in self.segment_ids])) + s += "is_random_next: %s\n" % self.is_random_next + s += "masked_lm_positions: %s\n" % (" ".join( + [str(x) for x in self.masked_lm_positions])) + s += "masked_lm_labels: %s\n" % (" ".join( + [tokenization.printable_text(x) for x in self.masked_lm_labels])) + s += "\n" + return s + + def __repr__(self): + return self.__str__() + + +def write_instance_to_example_files(instances, tokenizer, max_seq_length, + max_predictions_per_seq, output_files): + """Create TF example files from `TrainingInstance`s.""" + writers = [] + for output_file in output_files: + writers.append(tf.io.TFRecordWriter(output_file)) + + writer_index = 0 + + total_written = 0 + for (inst_index, instance) in enumerate(instances): + input_ids = tokenizer.convert_tokens_to_ids(instance.tokens) + input_mask = [1] * len(input_ids) + segment_ids = list(instance.segment_ids) + assert len(input_ids) <= max_seq_length + + while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + + masked_lm_positions = list(instance.masked_lm_positions) + masked_lm_ids = tokenizer.convert_tokens_to_ids(instance.masked_lm_labels) + masked_lm_weights = [1.0] * len(masked_lm_ids) + + while len(masked_lm_positions) < max_predictions_per_seq: + masked_lm_positions.append(0) + masked_lm_ids.append(0) + 
masked_lm_weights.append(0.0) + + next_sentence_label = 1 if instance.is_random_next else 0 + + features = collections.OrderedDict() + features["input_ids"] = create_int_feature(input_ids) + features["input_mask"] = create_int_feature(input_mask) + features["segment_ids"] = create_int_feature(segment_ids) + features["masked_lm_positions"] = create_int_feature(masked_lm_positions) + features["masked_lm_ids"] = create_int_feature(masked_lm_ids) + features["masked_lm_weights"] = create_float_feature(masked_lm_weights) + features["next_sentence_labels"] = create_int_feature([next_sentence_label]) + + tf_example = tf.train.Example(features=tf.train.Features(feature=features)) + + writers[writer_index].write(tf_example.SerializeToString()) + writer_index = (writer_index + 1) % len(writers) + + total_written += 1 + + if inst_index < 20: + tf.compat.v1.logging.info("*** Example ***") + tf.compat.v1.logging.info("tokens: %s" % " ".join( + [tokenization.printable_text(x) for x in instance.tokens])) + + for feature_name in features.keys(): + feature = features[feature_name] + values = [] + if feature.int64_list.value: + values = feature.int64_list.value + elif feature.float_list.value: + values = feature.float_list.value + tf.compat.v1.logging.info( + "%s: %s" % (feature_name, " ".join([str(x) for x in values]))) + + for writer in writers: + writer.close() + + tf.compat.v1.logging.info("Wrote %d total instances", total_written) + + +def create_int_feature(values): + feature = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) + return feature + + +def create_float_feature(values): + feature = tf.train.Feature(float_list=tf.train.FloatList(value=list(values))) + return feature + + +def create_training_instances(input_files, tokenizer, max_seq_length, + dupe_factor, short_seq_prob, masked_lm_prob, + max_predictions_per_seq, rng): + """Create `TrainingInstance`s from raw text.""" + all_documents = [[]] + + # Input file format: + # (1) One sentence per line. These should ideally be actual sentences, not + # entire paragraphs or arbitrary spans of text. (Because we use the + # sentence boundaries for the "next sentence prediction" task). + # (2) Blank lines between documents. Document boundaries are needed so + # that the "next sentence prediction" task doesn't span between documents. 
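+  # (Illustrative example of that format, added for clarity; not part of the original
+  # code or data:)
+  #
+  #     The quick brown fox jumped over the lazy dog.
+  #     It then wandered into the forest.
+  #
+  #     BERT is a bidirectional transformer encoder.
+  #     It is pre-trained with masked LM and next sentence prediction objectives.
+  #
+  # i.e. one sentence per line, with an empty line marking each document boundary.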
+ for input_file in input_files: + with tf.io.gfile.GFile(input_file, "r") as reader: + while True: + line = tokenization.convert_to_unicode(reader.readline()) + if not line: + break + line = line.strip() + + # Empty lines are used as document delimiters + if not line: + all_documents.append([]) + tokens = tokenizer.tokenize(line) + if tokens: + all_documents[-1].append(tokens) + + # Remove empty documents + all_documents = [x for x in all_documents if x] + rng.shuffle(all_documents) + + vocab_words = list(tokenizer.vocab.keys()) + instances = [] + for _ in range(dupe_factor): + for document_index in range(len(all_documents)): + instances.extend( + create_instances_from_document( + all_documents, document_index, max_seq_length, short_seq_prob, + masked_lm_prob, max_predictions_per_seq, vocab_words, rng)) + + rng.shuffle(instances) + return instances + + +def create_instances_from_document( + all_documents, document_index, max_seq_length, short_seq_prob, + masked_lm_prob, max_predictions_per_seq, vocab_words, rng): + """Creates `TrainingInstance`s for a single document.""" + document = all_documents[document_index] + + # Account for [CLS], [SEP], [SEP] + max_num_tokens = max_seq_length - 3 + + # We *usually* want to fill up the entire sequence since we are padding + # to `max_seq_length` anyways, so short sequences are generally wasted + # computation. However, we *sometimes* + # (i.e., short_seq_prob == 0.1 == 10% of the time) want to use shorter + # sequences to minimize the mismatch between pre-training and fine-tuning. + # The `target_seq_length` is just a rough target however, whereas + # `max_seq_length` is a hard limit. + target_seq_length = max_num_tokens + if rng.random() < short_seq_prob: + target_seq_length = rng.randint(2, max_num_tokens) + + # We DON'T just concatenate all of the tokens from a document into a long + # sequence and choose an arbitrary split point because this would make the + # next sentence prediction task too easy. Instead, we split the input into + # segments "A" and "B" based on the actual "sentences" provided by the user + # input. + instances = [] + current_chunk = [] + current_length = 0 + i = 0 + while i < len(document): + segment = document[i] + current_chunk.append(segment) + current_length += len(segment) + if i == len(document) - 1 or current_length >= target_seq_length: + if current_chunk: + # `a_end` is how many segments from `current_chunk` go into the `A` + # (first) sentence. + a_end = 1 + if len(current_chunk) >= 2: + a_end = rng.randint(1, len(current_chunk) - 1) + + tokens_a = [] + for j in range(a_end): + tokens_a.extend(current_chunk[j]) + + tokens_b = [] + # Random next + is_random_next = False + if len(current_chunk) == 1 or rng.random() < 0.5: + is_random_next = True + target_b_length = target_seq_length - len(tokens_a) + + # This should rarely go for more than one iteration for large + # corpora. However, just to be careful, we try to make sure that + # the random document is not the same as the document + # we're processing. + for _ in range(10): + random_document_index = rng.randint(0, len(all_documents) - 1) + if random_document_index != document_index: + break + + random_document = all_documents[random_document_index] + random_start = rng.randint(0, len(random_document) - 1) + for j in range(random_start, len(random_document)): + tokens_b.extend(random_document[j]) + if len(tokens_b) >= target_b_length: + break + # We didn't actually use these segments so we "put them back" so + # they don't go to waste. 
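+          # (Added note:) e.g. if current_chunk held 5 segments and a_end == 2, the 3
+          # segments not consumed here are revisited by rewinding the cursor i below.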
+ num_unused_segments = len(current_chunk) - a_end + i -= num_unused_segments + # Actual next + else: + is_random_next = False + for j in range(a_end, len(current_chunk)): + tokens_b.extend(current_chunk[j]) + truncate_seq_pair(tokens_a, tokens_b, max_num_tokens, rng) + + assert len(tokens_a) >= 1 + assert len(tokens_b) >= 1 + + tokens = [] + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in tokens_a: + tokens.append(token) + segment_ids.append(0) + + tokens.append("[SEP]") + segment_ids.append(0) + + for token in tokens_b: + tokens.append(token) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + (tokens, masked_lm_positions, + masked_lm_labels) = create_masked_lm_predictions( + tokens, masked_lm_prob, max_predictions_per_seq, vocab_words, rng) + instance = TrainingInstance( + tokens=tokens, + segment_ids=segment_ids, + is_random_next=is_random_next, + masked_lm_positions=masked_lm_positions, + masked_lm_labels=masked_lm_labels) + instances.append(instance) + current_chunk = [] + current_length = 0 + i += 1 + + return instances + + +MaskedLmInstance = collections.namedtuple("MaskedLmInstance", + ["index", "label"]) + + +def create_masked_lm_predictions(tokens, masked_lm_prob, + max_predictions_per_seq, vocab_words, rng): + """Creates the predictions for the masked LM objective.""" + + cand_indexes = [] + for (i, token) in enumerate(tokens): + if token == "[CLS]" or token == "[SEP]": + continue + # Whole Word Masking means that if we mask all of the wordpieces + # corresponding to an original word. When a word has been split into + # WordPieces, the first token does not have any marker and any subsequence + # tokens are prefixed with ##. So whenever we see the ## token, we + # append it to the previous set of word indexes. + # + # Note that Whole Word Masking does *not* change the training code + # at all -- we still predict each WordPiece independently, softmaxed + # over the entire vocabulary. + if (FLAGS.do_whole_word_mask and len(cand_indexes) >= 1 and + token.startswith("##")): + cand_indexes[-1].append(i) + else: + cand_indexes.append([i]) + + rng.shuffle(cand_indexes) + + output_tokens = list(tokens) + + num_to_predict = min(max_predictions_per_seq, + max(1, int(round(len(tokens) * masked_lm_prob)))) + + masked_lms = [] + covered_indexes = set() + for index_set in cand_indexes: + if len(masked_lms) >= num_to_predict: + break + # If adding a whole-word mask would exceed the maximum number of + # predictions, then just skip this candidate. 
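+    # (Illustrative, added note:) with --do_whole_word_mask, wordpieces such as
+    # "un", "##aff", "##able" are grouped into a single candidate index_set, so the
+    # whole word is either masked together (three predictions) or skipped here if
+    # that would push the count past num_to_predict.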
+ if len(masked_lms) + len(index_set) > num_to_predict: + continue + is_any_index_covered = False + for index in index_set: + if index in covered_indexes: + is_any_index_covered = True + break + if is_any_index_covered: + continue + for index in index_set: + covered_indexes.add(index) + + masked_token = None + # 80% of the time, replace with [MASK] + if rng.random() < 0.8: + masked_token = "[MASK]" + else: + # 10% of the time, keep original + if rng.random() < 0.5: + masked_token = tokens[index] + # 10% of the time, replace with random word + else: + masked_token = vocab_words[rng.randint(0, len(vocab_words) - 1)] + + output_tokens[index] = masked_token + + masked_lms.append(MaskedLmInstance(index=index, label=tokens[index])) + assert len(masked_lms) <= num_to_predict + masked_lms = sorted(masked_lms, key=lambda x: x.index) + + masked_lm_positions = [] + masked_lm_labels = [] + for p in masked_lms: + masked_lm_positions.append(p.index) + masked_lm_labels.append(p.label) + + return (output_tokens, masked_lm_positions, masked_lm_labels) + + +def truncate_seq_pair(tokens_a, tokens_b, max_num_tokens, rng): + """Truncates a pair of sequences to a maximum sequence length.""" + while True: + total_length = len(tokens_a) + len(tokens_b) + if total_length <= max_num_tokens: + break + + trunc_tokens = tokens_a if len(tokens_a) > len(tokens_b) else tokens_b + assert len(trunc_tokens) >= 1 + + # We want to sometimes truncate from the front and sometimes from the + # back to add more randomness and avoid biases. + if rng.random() < 0.5: + del trunc_tokens[0] + else: + trunc_tokens.pop() + + +def main(_): + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + + input_files = [] + for input_pattern in FLAGS.input_file.split(","): + input_files.extend(tf.io.gfile.glob(input_pattern)) + + tf.compat.v1.logging.info("*** Reading from input files ***") + for input_file in input_files: + tf.compat.v1.logging.info(" %s", input_file) + + rng = random.Random(FLAGS.random_seed) + instances = create_training_instances( + input_files, tokenizer, FLAGS.max_seq_length, FLAGS.dupe_factor, + FLAGS.short_seq_prob, FLAGS.masked_lm_prob, FLAGS.max_predictions_per_seq, + rng) + + output_files = FLAGS.output_file.split(",") + tf.compat.v1.logging.info("*** Writing to output files ***") + for output_file in output_files: + tf.compat.v1.logging.info(" %s", output_file) + + write_instance_to_example_files(instances, tokenizer, FLAGS.max_seq_length, + FLAGS.max_predictions_per_seq, output_files) + + +if __name__ == "__main__": + flags.mark_flag_as_required("input_file") + flags.mark_flag_as_required("output_file") + flags.mark_flag_as_required("vocab_file") + tf.compat.v1.app.run() diff --git a/exp-4-bert-squad/documentation_tf_update.md b/exp-4-bert-squad/documentation_tf_update.md new file mode 100644 index 0000000..4edbeca --- /dev/null +++ b/exp-4-bert-squad/documentation_tf_update.md @@ -0,0 +1,40 @@ +# SQuAD 1.1 source code update + +Original source code which is used to fine-tune BERT model for SQuAD dataset was prepared in tensorflow version 1.11.0. To start the training process locally using GPU, the Compute Unified Device Architecture (CUDA) is needed. 
Most of the GPUs do not support CUDA version which is required for older versions of tensorflow (1.11.0) anymore and to get the best possible performance from GPU during the training process, there is a recommendation to get up to date version of CUDA, which is supported by used GPU. That is the reason to update the source code and use tensorflow version 2.0 or higher. + +## Update code +### 1. Use command +There is an official way to update code from tensorflow version 1.x to 2.x provided by Tensorflow directly. +*Source*: https://www.tensorflow.org/guide/migrate/upgrade +*Command:* +``` +tf_upgrade_v2 \  +--intree \ +--outtree \ +--reportfile report.txt +``` +### 2. Complete code update manually +Additional changes are inevitable because of specific source code structure and functions which are not transformed automatically in previous step. + +*Replacements in* **run_squad.py**: +``` +slim.tpu -> tf.compat.v1.estimator.tpu +``` +``` +tf.flags -> tf.compat.v1.flags +``` +``` +tf.io.gfile.Open -> tf.io.gfile.GFile +``` +``` +slim.data.map_and_batch -> tf.data.experimental.map_and_batch +``` + +*Replacements in* **modelling.py**: +Replace the whole function *layer_norm()*. +``` +def layer_norm(input_tensor, name=None): +  """Run layer normalization on the last dimension of the tensor.""" +  layer_norma = tf.keras.layers.LayerNormalization(axis = -1) +  return layer_norma(input_tensor) +``` \ No newline at end of file diff --git a/exp-4-bert-squad/extract_features.py b/exp-4-bert-squad/extract_features.py new file mode 100644 index 0000000..3d23687 --- /dev/null +++ b/exp-4-bert-squad/extract_features.py @@ -0,0 +1,419 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Extract pre-computed feature vectors from BERT.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import codecs +import collections +import json +import re + +import modeling +import tokenization +import tensorflow as tf + +flags = tf.flags + +FLAGS = flags.FLAGS + +flags.DEFINE_string("input_file", None, "") + +flags.DEFINE_string("output_file", None, "") + +flags.DEFINE_string("layers", "-1,-2,-3,-4", "") + +flags.DEFINE_string( + "bert_config_file", None, + "The config json file corresponding to the pre-trained BERT model. " + "This specifies the model architecture.") + +flags.DEFINE_integer( + "max_seq_length", 128, + "The maximum total input sequence length after WordPiece tokenization. " + "Sequences longer than this will be truncated, and sequences shorter " + "than this will be padded.") + +flags.DEFINE_string( + "init_checkpoint", None, + "Initial checkpoint (usually from a pre-trained BERT model).") + +flags.DEFINE_string("vocab_file", None, + "The vocabulary file that the BERT model was trained on.") + +flags.DEFINE_bool( + "do_lower_case", True, + "Whether to lower case the input text. 
Should be True for uncased " + "models and False for cased models.") + +flags.DEFINE_integer("batch_size", 32, "Batch size for predictions.") + +flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.") + +flags.DEFINE_string("master", None, + "If using a TPU, the address of the master.") + +flags.DEFINE_integer( + "num_tpu_cores", 8, + "Only used if `use_tpu` is True. Total number of TPU cores to use.") + +flags.DEFINE_bool( + "use_one_hot_embeddings", False, + "If True, tf.one_hot will be used for embedding lookups, otherwise " + "tf.nn.embedding_lookup will be used. On TPUs, this should be True " + "since it is much faster.") + + +class InputExample(object): + + def __init__(self, unique_id, text_a, text_b): + self.unique_id = unique_id + self.text_a = text_a + self.text_b = text_b + + +class InputFeatures(object): + """A single set of features of data.""" + + def __init__(self, unique_id, tokens, input_ids, input_mask, input_type_ids): + self.unique_id = unique_id + self.tokens = tokens + self.input_ids = input_ids + self.input_mask = input_mask + self.input_type_ids = input_type_ids + + +def input_fn_builder(features, seq_length): + """Creates an `input_fn` closure to be passed to TPUEstimator.""" + + all_unique_ids = [] + all_input_ids = [] + all_input_mask = [] + all_input_type_ids = [] + + for feature in features: + all_unique_ids.append(feature.unique_id) + all_input_ids.append(feature.input_ids) + all_input_mask.append(feature.input_mask) + all_input_type_ids.append(feature.input_type_ids) + + def input_fn(params): + """The actual input function.""" + batch_size = params["batch_size"] + + num_examples = len(features) + + # This is for demo purposes and does NOT scale to large data sets. We do + # not use Dataset.from_generator() because that uses tf.py_func which is + # not TPU compatible. The right way to load data is with TFRecordReader. 
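+    # Each element of the dataset built below is a dict of four fixed-shape
+    # int32 tensors ("unique_ids", "input_ids", "input_mask", "input_type_ids");
+    # because every feature is embedded as a tf.constant, the whole feature set
+    # has to fit in host memory.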
+ d = tf.data.Dataset.from_tensor_slices({ + "unique_ids": + tf.constant(all_unique_ids, shape=[num_examples], dtype=tf.int32), + "input_ids": + tf.constant( + all_input_ids, shape=[num_examples, seq_length], + dtype=tf.int32), + "input_mask": + tf.constant( + all_input_mask, + shape=[num_examples, seq_length], + dtype=tf.int32), + "input_type_ids": + tf.constant( + all_input_type_ids, + shape=[num_examples, seq_length], + dtype=tf.int32), + }) + + d = d.batch(batch_size=batch_size, drop_remainder=False) + return d + + return input_fn + + +def model_fn_builder(bert_config, init_checkpoint, layer_indexes, use_tpu, + use_one_hot_embeddings): + """Returns `model_fn` closure for TPUEstimator.""" + + def model_fn(features, labels, mode, params): # pylint: disable=unused-argument + """The `model_fn` for TPUEstimator.""" + + unique_ids = features["unique_ids"] + input_ids = features["input_ids"] + input_mask = features["input_mask"] + input_type_ids = features["input_type_ids"] + + model = modeling.BertModel( + config=bert_config, + is_training=False, + input_ids=input_ids, + input_mask=input_mask, + token_type_ids=input_type_ids, + use_one_hot_embeddings=use_one_hot_embeddings) + + if mode != tf.estimator.ModeKeys.PREDICT: + raise ValueError("Only PREDICT modes are supported: %s" % (mode)) + + tvars = tf.compat.v1.trainable_variables() + scaffold_fn = None + (assignment_map, + initialized_variable_names) = modeling.get_assignment_map_from_checkpoint( + tvars, init_checkpoint) + if use_tpu: + + def tpu_scaffold(): + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + return tf.compat.v1.train.Scaffold() + + scaffold_fn = tpu_scaffold + else: + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + + tf.compat.v1.logging.info("**** Trainable Variables ****") + for var in tvars: + init_string = "" + if var.name in initialized_variable_names: + init_string = ", *INIT_FROM_CKPT*" + tf.compat.v1.logging.info(" name = %s, shape = %s%s", var.name, var.shape, + init_string) + + all_layers = model.get_all_encoder_layers() + + predictions = { + "unique_id": unique_ids, + } + + for (i, layer_index) in enumerate(layer_indexes): + predictions["layer_output_%d" % i] = all_layers[layer_index] + + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, predictions=predictions, scaffold_fn=scaffold_fn) + return output_spec + + return model_fn + + +def convert_examples_to_features(examples, seq_length, tokenizer): + """Loads a data file into a list of `InputBatch`s.""" + + features = [] + for (ex_index, example) in enumerate(examples): + tokens_a = tokenizer.tokenize(example.text_a) + + tokens_b = None + if example.text_b: + tokens_b = tokenizer.tokenize(example.text_b) + + if tokens_b: + # Modifies `tokens_a` and `tokens_b` in place so that the total + # length is less than the specified length. + # Account for [CLS], [SEP], [SEP] with "- 3" + _truncate_seq_pair(tokens_a, tokens_b, seq_length - 3) + else: + # Account for [CLS] and [SEP] with "- 2" + if len(tokens_a) > seq_length - 2: + tokens_a = tokens_a[0:(seq_length - 2)] + + # The convention in BERT is: + # (a) For sequence pairs: + # tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP] + # type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1 + # (b) For single sequences: + # tokens: [CLS] the dog is hairy . [SEP] + # type_ids: 0 0 0 0 0 0 0 + # + # Where "type_ids" are used to indicate whether this is the first + # sequence or the second sequence. 
The embedding vectors for `type=0` and + # `type=1` were learned during pre-training and are added to the wordpiece + # embedding vector (and position vector). This is not *strictly* necessary + # since the [SEP] token unambiguously separates the sequences, but it makes + # it easier for the model to learn the concept of sequences. + # + # For classification tasks, the first vector (corresponding to [CLS]) is + # used as as the "sentence vector". Note that this only makes sense because + # the entire model is fine-tuned. + tokens = [] + input_type_ids = [] + tokens.append("[CLS]") + input_type_ids.append(0) + for token in tokens_a: + tokens.append(token) + input_type_ids.append(0) + tokens.append("[SEP]") + input_type_ids.append(0) + + if tokens_b: + for token in tokens_b: + tokens.append(token) + input_type_ids.append(1) + tokens.append("[SEP]") + input_type_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + + # The mask has 1 for real tokens and 0 for padding tokens. Only real + # tokens are attended to. + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. + while len(input_ids) < seq_length: + input_ids.append(0) + input_mask.append(0) + input_type_ids.append(0) + + assert len(input_ids) == seq_length + assert len(input_mask) == seq_length + assert len(input_type_ids) == seq_length + + if ex_index < 5: + tf.compat.v1.logging.info("*** Example ***") + tf.compat.v1.logging.info("unique_id: %s" % (example.unique_id)) + tf.compat.v1.logging.info("tokens: %s" % " ".join( + [tokenization.printable_text(x) for x in tokens])) + tf.compat.v1.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + tf.compat.v1.logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) + tf.compat.v1.logging.info( + "input_type_ids: %s" % " ".join([str(x) for x in input_type_ids])) + + features.append( + InputFeatures( + unique_id=example.unique_id, + tokens=tokens, + input_ids=input_ids, + input_mask=input_mask, + input_type_ids=input_type_ids)) + return features + + +def _truncate_seq_pair(tokens_a, tokens_b, max_length): + """Truncates a sequence pair in place to the maximum length.""" + + # This is a simple heuristic which will always truncate the longer sequence + # one token at a time. This makes more sense than truncating an equal percent + # of tokens from each, since if one sequence is very short then each token + # that's truncated likely contains more information than a longer sequence. 
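+  # For example, with max_length=8, len(tokens_a)=7 and len(tokens_b)=4, three
+  # tokens are popped from the end of tokens_a and none from tokens_b, leaving
+  # lengths 4 and 4 (illustrative numbers only).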
+ while True: + total_length = len(tokens_a) + len(tokens_b) + if total_length <= max_length: + break + if len(tokens_a) > len(tokens_b): + tokens_a.pop() + else: + tokens_b.pop() + + +def read_examples(input_file): + """Read a list of `InputExample`s from an input file.""" + examples = [] + unique_id = 0 + with tf.io.gfile.GFile(input_file, "r") as reader: + while True: + line = tokenization.convert_to_unicode(reader.readline()) + if not line: + break + line = line.strip() + text_a = None + text_b = None + m = re.match(r"^(.*) \|\|\| (.*)$", line) + if m is None: + text_a = line + else: + text_a = m.group(1) + text_b = m.group(2) + examples.append( + InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b)) + unique_id += 1 + return examples + + +def main(_): + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + + layer_indexes = [int(x) for x in FLAGS.layers.split(",")] + + bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file) + + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + + is_per_host = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V2 + run_config = tf.compat.v1.estimator.tpu.RunConfig( + master=FLAGS.master, + tpu_config=tf.compat.v1.estimator.tpu.TPUConfig( + num_shards=FLAGS.num_tpu_cores, + per_host_input_for_training=is_per_host)) + + examples = read_examples(FLAGS.input_file) + + features = convert_examples_to_features( + examples=examples, seq_length=FLAGS.max_seq_length, tokenizer=tokenizer) + + unique_id_to_feature = {} + for feature in features: + unique_id_to_feature[feature.unique_id] = feature + + model_fn = model_fn_builder( + bert_config=bert_config, + init_checkpoint=FLAGS.init_checkpoint, + layer_indexes=layer_indexes, + use_tpu=FLAGS.use_tpu, + use_one_hot_embeddings=FLAGS.use_one_hot_embeddings) + + # If TPU is not available, this will fall back to normal Estimator on CPU + # or GPU. 
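+  # TPUEstimator injects the batch size into `params`, so the
+  # `params["batch_size"]` read inside `input_fn` resolves to
+  # `predict_batch_size` during prediction.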
+ estimator = tf.compat.v1.estimator.tpu.TPUEstimator( + use_tpu=FLAGS.use_tpu, + model_fn=model_fn, + config=run_config, + predict_batch_size=FLAGS.batch_size) + + input_fn = input_fn_builder( + features=features, seq_length=FLAGS.max_seq_length) + + with codecs.getwriter("utf-8")(tf.io.gfile.GFile(FLAGS.output_file, + "w")) as writer: + for result in estimator.predict(input_fn, yield_single_examples=True): + unique_id = int(result["unique_id"]) + feature = unique_id_to_feature[unique_id] + output_json = collections.OrderedDict() + output_json["linex_index"] = unique_id + all_features = [] + for (i, token) in enumerate(feature.tokens): + all_layers = [] + for (j, layer_index) in enumerate(layer_indexes): + layer_output = result["layer_output_%d" % j] + layers = collections.OrderedDict() + layers["index"] = layer_index + layers["values"] = [ + round(float(x), 6) for x in layer_output[i:(i + 1)].flat + ] + all_layers.append(layers) + features = collections.OrderedDict() + features["token"] = token + features["layers"] = all_layers + all_features.append(features) + output_json["features"] = all_features + writer.write(json.dumps(output_json) + "\n") + + +if __name__ == "__main__": + flags.mark_flag_as_required("input_file") + flags.mark_flag_as_required("vocab_file") + flags.mark_flag_as_required("bert_config_file") + flags.mark_flag_as_required("init_checkpoint") + flags.mark_flag_as_required("output_file") + tf.compat.v1.app.run() diff --git a/exp-4-bert-squad/modeling.py b/exp-4-bert-squad/modeling.py new file mode 100644 index 0000000..6330d85 --- /dev/null +++ b/exp-4-bert-squad/modeling.py @@ -0,0 +1,994 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The main BERT model and related functions.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import copy +import json +import math +import re +import numpy as np +import six +import tensorflow as tf + + +class BertConfig(object): + """Configuration for `BertModel`.""" + + def __init__(self, + vocab_size, + hidden_size=768, + num_hidden_layers=12, + num_attention_heads=12, + intermediate_size=3072, + hidden_act="gelu", + hidden_dropout_prob=0.1, + attention_probs_dropout_prob=0.1, + max_position_embeddings=512, + type_vocab_size=16, + initializer_range=0.02): + """Constructs BertConfig. + + Args: + vocab_size: Vocabulary size of `inputs_ids` in `BertModel`. + hidden_size: Size of the encoder layers and the pooler layer. + num_hidden_layers: Number of hidden layers in the Transformer encoder. + num_attention_heads: Number of attention heads for each attention layer in + the Transformer encoder. + intermediate_size: The size of the "intermediate" (i.e., feed-forward) + layer in the Transformer encoder. + hidden_act: The non-linear activation function (function or string) in the + encoder and pooler. 
+ hidden_dropout_prob: The dropout probability for all fully connected + layers in the embeddings, encoder, and pooler. + attention_probs_dropout_prob: The dropout ratio for the attention + probabilities. + max_position_embeddings: The maximum sequence length that this model might + ever be used with. Typically set this to something large just in case + (e.g., 512 or 1024 or 2048). + type_vocab_size: The vocabulary size of the `token_type_ids` passed into + `BertModel`. + initializer_range: The stdev of the truncated_normal_initializer for + initializing all weight matrices. + """ + self.vocab_size = vocab_size + self.hidden_size = hidden_size + self.num_hidden_layers = num_hidden_layers + self.num_attention_heads = num_attention_heads + self.hidden_act = hidden_act + self.intermediate_size = intermediate_size + self.hidden_dropout_prob = hidden_dropout_prob + self.attention_probs_dropout_prob = attention_probs_dropout_prob + self.max_position_embeddings = max_position_embeddings + self.type_vocab_size = type_vocab_size + self.initializer_range = initializer_range + + @classmethod + def from_dict(cls, json_object): + """Constructs a `BertConfig` from a Python dictionary of parameters.""" + config = BertConfig(vocab_size=None) + for (key, value) in six.iteritems(json_object): + config.__dict__[key] = value + return config + + @classmethod + def from_json_file(cls, json_file): + """Constructs a `BertConfig` from a json file of parameters.""" + with tf.io.gfile.GFile(json_file, "r") as reader: + text = reader.read() + return cls.from_dict(json.loads(text)) + + def to_dict(self): + """Serializes this instance to a Python dictionary.""" + output = copy.deepcopy(self.__dict__) + return output + + def to_json_string(self): + """Serializes this instance to a JSON string.""" + return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n" + + +class BertModel(object): + """BERT model ("Bidirectional Encoder Representations from Transformers"). + + Example usage: + + ```python + # Already been converted into WordPiece token ids + input_ids = tf.constant([[31, 51, 99], [15, 5, 0]]) + input_mask = tf.constant([[1, 1, 1], [1, 1, 0]]) + token_type_ids = tf.constant([[0, 0, 1], [0, 2, 0]]) + + config = modeling.BertConfig(vocab_size=32000, hidden_size=512, + num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024) + + model = modeling.BertModel(config=config, is_training=True, + input_ids=input_ids, input_mask=input_mask, token_type_ids=token_type_ids) + + label_embeddings = tf.get_variable(...) + pooled_output = model.get_pooled_output() + logits = tf.matmul(pooled_output, label_embeddings) + ... + ``` + """ + + def __init__(self, + config, + is_training, + input_ids, + input_mask=None, + token_type_ids=None, + use_one_hot_embeddings=False, + scope=None): + """Constructor for BertModel. + + Args: + config: `BertConfig` instance. + is_training: bool. true for training model, false for eval model. Controls + whether dropout will be applied. + input_ids: int32 Tensor of shape [batch_size, seq_length]. + input_mask: (optional) int32 Tensor of shape [batch_size, seq_length]. + token_type_ids: (optional) int32 Tensor of shape [batch_size, seq_length]. + use_one_hot_embeddings: (optional) bool. Whether to use one-hot word + embeddings or tf.embedding_lookup() for the word embeddings. + scope: (optional) variable scope. Defaults to "bert". + + Raises: + ValueError: The config is invalid or one of the input tensor shapes + is invalid. 
+ """ + config = copy.deepcopy(config) + if not is_training: + config.hidden_dropout_prob = 0.0 + config.attention_probs_dropout_prob = 0.0 + + input_shape = get_shape_list(input_ids, expected_rank=2) + batch_size = input_shape[0] + seq_length = input_shape[1] + + if input_mask is None: + input_mask = tf.ones(shape=[batch_size, seq_length], dtype=tf.int32) + + if token_type_ids is None: + token_type_ids = tf.zeros(shape=[batch_size, seq_length], dtype=tf.int32) + + with tf.compat.v1.variable_scope(scope, default_name="bert"): + with tf.compat.v1.variable_scope("embeddings"): + # Perform embedding lookup on the word ids. + (self.embedding_output, self.embedding_table) = embedding_lookup( + input_ids=input_ids, + vocab_size=config.vocab_size, + embedding_size=config.hidden_size, + initializer_range=config.initializer_range, + word_embedding_name="word_embeddings", + use_one_hot_embeddings=use_one_hot_embeddings) + + # Add positional embeddings and token type embeddings, then layer + # normalize and perform dropout. + self.embedding_output = embedding_postprocessor( + input_tensor=self.embedding_output, + use_token_type=True, + token_type_ids=token_type_ids, + token_type_vocab_size=config.type_vocab_size, + token_type_embedding_name="token_type_embeddings", + use_position_embeddings=True, + position_embedding_name="position_embeddings", + initializer_range=config.initializer_range, + max_position_embeddings=config.max_position_embeddings, + dropout_prob=config.hidden_dropout_prob) + + with tf.compat.v1.variable_scope("encoder"): + # This converts a 2D mask of shape [batch_size, seq_length] to a 3D + # mask of shape [batch_size, seq_length, seq_length] which is used + # for the attention scores. + attention_mask = create_attention_mask_from_input_mask( + input_ids, input_mask) + + # Run the stacked transformer. + # `sequence_output` shape = [batch_size, seq_length, hidden_size]. + self.all_encoder_layers = transformer_model( + input_tensor=self.embedding_output, + attention_mask=attention_mask, + hidden_size=config.hidden_size, + num_hidden_layers=config.num_hidden_layers, + num_attention_heads=config.num_attention_heads, + intermediate_size=config.intermediate_size, + intermediate_act_fn=get_activation(config.hidden_act), + hidden_dropout_prob=config.hidden_dropout_prob, + attention_probs_dropout_prob=config.attention_probs_dropout_prob, + initializer_range=config.initializer_range, + do_return_all_layers=True) + + self.sequence_output = self.all_encoder_layers[-1] + # The "pooler" converts the encoded sequence tensor of shape + # [batch_size, seq_length, hidden_size] to a tensor of shape + # [batch_size, hidden_size]. This is necessary for segment-level + # (or segment-pair-level) classification tasks where we need a fixed + # dimensional representation of the segment. + with tf.compat.v1.variable_scope("pooler"): + # We "pool" the model by simply taking the hidden state corresponding + # to the first token. We assume that this has been pre-trained + first_token_tensor = tf.squeeze(self.sequence_output[:, 0:1, :], axis=1) + self.pooled_output = tf.compat.v1.layers.dense( + first_token_tensor, + config.hidden_size, + activation=tf.tanh, + kernel_initializer=create_initializer(config.initializer_range)) + + def get_pooled_output(self): + return self.pooled_output + + def get_sequence_output(self): + """Gets final hidden layer of encoder. + + Returns: + float Tensor of shape [batch_size, seq_length, hidden_size] corresponding + to the final hidden of the transformer encoder. 
+ """ + return self.sequence_output + + def get_all_encoder_layers(self): + return self.all_encoder_layers + + def get_embedding_output(self): + """Gets output of the embedding lookup (i.e., input to the transformer). + + Returns: + float Tensor of shape [batch_size, seq_length, hidden_size] corresponding + to the output of the embedding layer, after summing the word + embeddings with the positional embeddings and the token type embeddings, + then performing layer normalization. This is the input to the transformer. + """ + return self.embedding_output + + def get_embedding_table(self): + return self.embedding_table + + +def gelu(x): + """Gaussian Error Linear Unit. + + This is a smoother version of the RELU. + Original paper: https://arxiv.org/abs/1606.08415 + Args: + x: float Tensor to perform activation. + + Returns: + `x` with the GELU activation applied. + """ + cdf = 0.5 * (1.0 + tf.tanh( + (np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3))))) + return x * cdf + + +def get_activation(activation_string): + """Maps a string to a Python function, e.g., "relu" => `tf.nn.relu`. + + Args: + activation_string: String name of the activation function. + + Returns: + A Python function corresponding to the activation function. If + `activation_string` is None, empty, or "linear", this will return None. + If `activation_string` is not a string, it will return `activation_string`. + + Raises: + ValueError: The `activation_string` does not correspond to a known + activation. + """ + + # We assume that anything that"s not a string is already an activation + # function, so we just return it. + if not isinstance(activation_string, six.string_types): + return activation_string + + if not activation_string: + return None + + act = activation_string.lower() + if act == "linear": + return None + elif act == "relu": + return tf.nn.relu + elif act == "gelu": + return gelu + elif act == "tanh": + return tf.tanh + else: + raise ValueError("Unsupported activation: %s" % act) + + +def get_assignment_map_from_checkpoint(tvars, init_checkpoint): + """Compute the union of the current variables and checkpoint variables.""" + assignment_map = {} + initialized_variable_names = {} + + name_to_variable = collections.OrderedDict() + for var in tvars: + name = var.name + m = re.match("^(.*):\\d+$", name) + if m is not None: + name = m.group(1) + name_to_variable[name] = var + + init_vars = tf.train.list_variables(init_checkpoint) + + assignment_map = collections.OrderedDict() + for x in init_vars: + (name, var) = (x[0], x[1]) + if name not in name_to_variable: + continue + assignment_map[name] = name + initialized_variable_names[name] = 1 + initialized_variable_names[name + ":0"] = 1 + + return (assignment_map, initialized_variable_names) + + +def dropout(input_tensor, dropout_prob): + """Perform dropout. + + Args: + input_tensor: float Tensor. + dropout_prob: Python float. The probability of dropping out a value (NOT of + *keeping* a dimension as in `tf.nn.dropout`). + + Returns: + A version of `input_tensor` with dropout applied. 
+ """ + if dropout_prob is None or dropout_prob == 0.0: + return input_tensor + + output = tf.nn.dropout(input_tensor, rate=1 - (1.0 - dropout_prob)) + return output + + +''' +def layer_norm(input_tensor, name=None): + """Run layer normalization on the last dimension of the tensor.""" + return tf.compat.v1.layers.layer_norm( + inputs=input_tensor, begin_norm_axis=-1, begin_params_axis=-1, scope=name) +''' + + +def layer_norm(input_tensor, name=None): + """Run layer normalization on the last dimension of the tensor.""" + layer_norma = tf.keras.layers.LayerNormalization(axis = -1) + return layer_norma(input_tensor) + + +def layer_norm_and_dropout(input_tensor, dropout_prob, name=None): + """Runs layer normalization followed by dropout.""" + output_tensor = layer_norm(input_tensor, name) + output_tensor = dropout(output_tensor, dropout_prob) + return output_tensor + + +def create_initializer(initializer_range=0.02): + """Creates a `truncated_normal_initializer` with the given range.""" + return tf.compat.v1.truncated_normal_initializer(stddev=initializer_range) + + +def embedding_lookup(input_ids, + vocab_size, + embedding_size=128, + initializer_range=0.02, + word_embedding_name="word_embeddings", + use_one_hot_embeddings=False): + """Looks up words embeddings for id tensor. + + Args: + input_ids: int32 Tensor of shape [batch_size, seq_length] containing word + ids. + vocab_size: int. Size of the embedding vocabulary. + embedding_size: int. Width of the word embeddings. + initializer_range: float. Embedding initialization range. + word_embedding_name: string. Name of the embedding table. + use_one_hot_embeddings: bool. If True, use one-hot method for word + embeddings. If False, use `tf.gather()`. + + Returns: + float Tensor of shape [batch_size, seq_length, embedding_size]. + """ + # This function assumes that the input is of shape [batch_size, seq_length, + # num_inputs]. + # + # If the input is a 2D tensor of shape [batch_size, seq_length], we + # reshape to [batch_size, seq_length, 1]. + if input_ids.shape.ndims == 2: + input_ids = tf.expand_dims(input_ids, axis=[-1]) + + embedding_table = tf.compat.v1.get_variable( + name=word_embedding_name, + shape=[vocab_size, embedding_size], + initializer=create_initializer(initializer_range)) + + flat_input_ids = tf.reshape(input_ids, [-1]) + if use_one_hot_embeddings: + one_hot_input_ids = tf.one_hot(flat_input_ids, depth=vocab_size) + output = tf.matmul(one_hot_input_ids, embedding_table) + else: + output = tf.gather(embedding_table, flat_input_ids) + + input_shape = get_shape_list(input_ids) + + output = tf.reshape(output, + input_shape[0:-1] + [input_shape[-1] * embedding_size]) + return (output, embedding_table) + + +def embedding_postprocessor(input_tensor, + use_token_type=False, + token_type_ids=None, + token_type_vocab_size=16, + token_type_embedding_name="token_type_embeddings", + use_position_embeddings=True, + position_embedding_name="position_embeddings", + initializer_range=0.02, + max_position_embeddings=512, + dropout_prob=0.1): + """Performs various post-processing on a word embedding tensor. + + Args: + input_tensor: float Tensor of shape [batch_size, seq_length, + embedding_size]. + use_token_type: bool. Whether to add embeddings for `token_type_ids`. + token_type_ids: (optional) int32 Tensor of shape [batch_size, seq_length]. + Must be specified if `use_token_type` is True. + token_type_vocab_size: int. The vocabulary size of `token_type_ids`. + token_type_embedding_name: string. 
The name of the embedding table variable + for token type ids. + use_position_embeddings: bool. Whether to add position embeddings for the + position of each token in the sequence. + position_embedding_name: string. The name of the embedding table variable + for positional embeddings. + initializer_range: float. Range of the weight initialization. + max_position_embeddings: int. Maximum sequence length that might ever be + used with this model. This can be longer than the sequence length of + input_tensor, but cannot be shorter. + dropout_prob: float. Dropout probability applied to the final output tensor. + + Returns: + float tensor with same shape as `input_tensor`. + + Raises: + ValueError: One of the tensor shapes or input values is invalid. + """ + input_shape = get_shape_list(input_tensor, expected_rank=3) + batch_size = input_shape[0] + seq_length = input_shape[1] + width = input_shape[2] + + output = input_tensor + + if use_token_type: + if token_type_ids is None: + raise ValueError("`token_type_ids` must be specified if" + "`use_token_type` is True.") + token_type_table = tf.compat.v1.get_variable( + name=token_type_embedding_name, + shape=[token_type_vocab_size, width], + initializer=create_initializer(initializer_range)) + # This vocab will be small so we always do one-hot here, since it is always + # faster for a small vocabulary. + flat_token_type_ids = tf.reshape(token_type_ids, [-1]) + one_hot_ids = tf.one_hot(flat_token_type_ids, depth=token_type_vocab_size) + token_type_embeddings = tf.matmul(one_hot_ids, token_type_table) + token_type_embeddings = tf.reshape(token_type_embeddings, + [batch_size, seq_length, width]) + output += token_type_embeddings + + if use_position_embeddings: + assert_op = tf.compat.v1.assert_less_equal(seq_length, max_position_embeddings) + with tf.control_dependencies([assert_op]): + full_position_embeddings = tf.compat.v1.get_variable( + name=position_embedding_name, + shape=[max_position_embeddings, width], + initializer=create_initializer(initializer_range)) + # Since the position embedding table is a learned variable, we create it + # using a (long) sequence length `max_position_embeddings`. The actual + # sequence length might be shorter than this, for faster training of + # tasks that do not have long sequences. + # + # So `full_position_embeddings` is effectively an embedding table + # for position [0, 1, 2, ..., max_position_embeddings-1], and the current + # sequence has positions [0, 1, 2, ... seq_length-1], so we can just + # perform a slice. + position_embeddings = tf.slice(full_position_embeddings, [0, 0], + [seq_length, -1]) + num_dims = len(output.shape.as_list()) + + # Only the last two dimensions are relevant (`seq_length` and `width`), so + # we broadcast among the first dimensions, which is typically just + # the batch size. + position_broadcast_shape = [] + for _ in range(num_dims - 2): + position_broadcast_shape.append(1) + position_broadcast_shape.extend([seq_length, width]) + position_embeddings = tf.reshape(position_embeddings, + position_broadcast_shape) + output += position_embeddings + + output = layer_norm_and_dropout(output, dropout_prob) + return output + + +def create_attention_mask_from_input_mask(from_tensor, to_mask): + """Create 3D attention mask from a 2D tensor mask. + + Args: + from_tensor: 2D or 3D Tensor of shape [batch_size, from_seq_length, ...]. + to_mask: int32 Tensor of shape [batch_size, to_seq_length]. + + Returns: + float Tensor of shape [batch_size, from_seq_length, to_seq_length]. 
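+
+  Example:
+    With `to_mask = [[1, 1, 0]]` and a `from_tensor` whose from_seq_length is 2,
+    the returned mask is `[[[1., 1., 0.], [1., 1., 0.]]]` (illustrative values).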
+ """ + from_shape = get_shape_list(from_tensor, expected_rank=[2, 3]) + batch_size = from_shape[0] + from_seq_length = from_shape[1] + + to_shape = get_shape_list(to_mask, expected_rank=2) + to_seq_length = to_shape[1] + + to_mask = tf.cast( + tf.reshape(to_mask, [batch_size, 1, to_seq_length]), tf.float32) + + # We don't assume that `from_tensor` is a mask (although it could be). We + # don't actually care if we attend *from* padding tokens (only *to* padding) + # tokens so we create a tensor of all ones. + # + # `broadcast_ones` = [batch_size, from_seq_length, 1] + broadcast_ones = tf.ones( + shape=[batch_size, from_seq_length, 1], dtype=tf.float32) + + # Here we broadcast along two dimensions to create the mask. + mask = broadcast_ones * to_mask + + return mask + + +def attention_layer(from_tensor, + to_tensor, + attention_mask=None, + num_attention_heads=1, + size_per_head=512, + query_act=None, + key_act=None, + value_act=None, + attention_probs_dropout_prob=0.0, + initializer_range=0.02, + do_return_2d_tensor=False, + batch_size=None, + from_seq_length=None, + to_seq_length=None): + """Performs multi-headed attention from `from_tensor` to `to_tensor`. + + This is an implementation of multi-headed attention based on "Attention + is all you Need". If `from_tensor` and `to_tensor` are the same, then + this is self-attention. Each timestep in `from_tensor` attends to the + corresponding sequence in `to_tensor`, and returns a fixed-with vector. + + This function first projects `from_tensor` into a "query" tensor and + `to_tensor` into "key" and "value" tensors. These are (effectively) a list + of tensors of length `num_attention_heads`, where each tensor is of shape + [batch_size, seq_length, size_per_head]. + + Then, the query and key tensors are dot-producted and scaled. These are + softmaxed to obtain attention probabilities. The value tensors are then + interpolated by these probabilities, then concatenated back to a single + tensor and returned. + + In practice, the multi-headed attention are done with transposes and + reshapes rather than actual separate tensors. + + Args: + from_tensor: float Tensor of shape [batch_size, from_seq_length, + from_width]. + to_tensor: float Tensor of shape [batch_size, to_seq_length, to_width]. + attention_mask: (optional) int32 Tensor of shape [batch_size, + from_seq_length, to_seq_length]. The values should be 1 or 0. The + attention scores will effectively be set to -infinity for any positions in + the mask that are 0, and will be unchanged for positions that are 1. + num_attention_heads: int. Number of attention heads. + size_per_head: int. Size of each attention head. + query_act: (optional) Activation function for the query transform. + key_act: (optional) Activation function for the key transform. + value_act: (optional) Activation function for the value transform. + attention_probs_dropout_prob: (optional) float. Dropout probability of the + attention probabilities. + initializer_range: float. Range of the weight initializer. + do_return_2d_tensor: bool. If True, the output will be of shape [batch_size + * from_seq_length, num_attention_heads * size_per_head]. If False, the + output will be of shape [batch_size, from_seq_length, num_attention_heads + * size_per_head]. + batch_size: (Optional) int. If the input is 2D, this might be the batch size + of the 3D version of the `from_tensor` and `to_tensor`. + from_seq_length: (Optional) If the input is 2D, this might be the seq length + of the 3D version of the `from_tensor`. 
+ to_seq_length: (Optional) If the input is 2D, this might be the seq length + of the 3D version of the `to_tensor`. + + Returns: + float Tensor of shape [batch_size, from_seq_length, + num_attention_heads * size_per_head]. (If `do_return_2d_tensor` is + true, this will be of shape [batch_size * from_seq_length, + num_attention_heads * size_per_head]). + + Raises: + ValueError: Any of the arguments or tensor shapes are invalid. + """ + + def transpose_for_scores(input_tensor, batch_size, num_attention_heads, + seq_length, width): + output_tensor = tf.reshape( + input_tensor, [batch_size, seq_length, num_attention_heads, width]) + + output_tensor = tf.transpose(a=output_tensor, perm=[0, 2, 1, 3]) + return output_tensor + + from_shape = get_shape_list(from_tensor, expected_rank=[2, 3]) + to_shape = get_shape_list(to_tensor, expected_rank=[2, 3]) + + if len(from_shape) != len(to_shape): + raise ValueError( + "The rank of `from_tensor` must match the rank of `to_tensor`.") + + if len(from_shape) == 3: + batch_size = from_shape[0] + from_seq_length = from_shape[1] + to_seq_length = to_shape[1] + elif len(from_shape) == 2: + if (batch_size is None or from_seq_length is None or to_seq_length is None): + raise ValueError( + "When passing in rank 2 tensors to attention_layer, the values " + "for `batch_size`, `from_seq_length`, and `to_seq_length` " + "must all be specified.") + + # Scalar dimensions referenced here: + # B = batch size (number of sequences) + # F = `from_tensor` sequence length + # T = `to_tensor` sequence length + # N = `num_attention_heads` + # H = `size_per_head` + + from_tensor_2d = reshape_to_matrix(from_tensor) + to_tensor_2d = reshape_to_matrix(to_tensor) + + # `query_layer` = [B*F, N*H] + query_layer = tf.compat.v1.layers.dense( + from_tensor_2d, + num_attention_heads * size_per_head, + activation=query_act, + name="query", + kernel_initializer=create_initializer(initializer_range)) + + # `key_layer` = [B*T, N*H] + key_layer = tf.compat.v1.layers.dense( + to_tensor_2d, + num_attention_heads * size_per_head, + activation=key_act, + name="key", + kernel_initializer=create_initializer(initializer_range)) + + # `value_layer` = [B*T, N*H] + value_layer = tf.compat.v1.layers.dense( + to_tensor_2d, + num_attention_heads * size_per_head, + activation=value_act, + name="value", + kernel_initializer=create_initializer(initializer_range)) + + # `query_layer` = [B, N, F, H] + query_layer = transpose_for_scores(query_layer, batch_size, + num_attention_heads, from_seq_length, + size_per_head) + + # `key_layer` = [B, N, T, H] + key_layer = transpose_for_scores(key_layer, batch_size, num_attention_heads, + to_seq_length, size_per_head) + + # Take the dot product between "query" and "key" to get the raw + # attention scores. + # `attention_scores` = [B, N, F, T] + attention_scores = tf.matmul(query_layer, key_layer, transpose_b=True) + attention_scores = tf.multiply(attention_scores, + 1.0 / math.sqrt(float(size_per_head))) + + if attention_mask is not None: + # `attention_mask` = [B, 1, F, T] + attention_mask = tf.expand_dims(attention_mask, axis=[1]) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + adder = (1.0 - tf.cast(attention_mask, tf.float32)) * -10000.0 + + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. 
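+    # Concretely: an unmasked position gets adder = (1 - 1) * -10000 = 0, so its
+    # score is unchanged, while a masked position gets adder = (1 - 0) * -10000
+    # = -10000, which drives its softmax weight to approximately zero.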
+ attention_scores += adder + + # Normalize the attention scores to probabilities. + # `attention_probs` = [B, N, F, T] + attention_probs = tf.nn.softmax(attention_scores) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. + attention_probs = dropout(attention_probs, attention_probs_dropout_prob) + + # `value_layer` = [B, T, N, H] + value_layer = tf.reshape( + value_layer, + [batch_size, to_seq_length, num_attention_heads, size_per_head]) + + # `value_layer` = [B, N, T, H] + value_layer = tf.transpose(a=value_layer, perm=[0, 2, 1, 3]) + + # `context_layer` = [B, N, F, H] + context_layer = tf.matmul(attention_probs, value_layer) + + # `context_layer` = [B, F, N, H] + context_layer = tf.transpose(a=context_layer, perm=[0, 2, 1, 3]) + + if do_return_2d_tensor: + # `context_layer` = [B*F, N*H] + context_layer = tf.reshape( + context_layer, + [batch_size * from_seq_length, num_attention_heads * size_per_head]) + else: + # `context_layer` = [B, F, N*H] + context_layer = tf.reshape( + context_layer, + [batch_size, from_seq_length, num_attention_heads * size_per_head]) + + return context_layer + + +def transformer_model(input_tensor, + attention_mask=None, + hidden_size=768, + num_hidden_layers=12, + num_attention_heads=12, + intermediate_size=3072, + intermediate_act_fn=gelu, + hidden_dropout_prob=0.1, + attention_probs_dropout_prob=0.1, + initializer_range=0.02, + do_return_all_layers=False): + """Multi-headed, multi-layer Transformer from "Attention is All You Need". + + This is almost an exact implementation of the original Transformer encoder. + + See the original paper: + https://arxiv.org/abs/1706.03762 + + Also see: + https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/models/transformer.py + + Args: + input_tensor: float Tensor of shape [batch_size, seq_length, hidden_size]. + attention_mask: (optional) int32 Tensor of shape [batch_size, seq_length, + seq_length], with 1 for positions that can be attended to and 0 in + positions that should not be. + hidden_size: int. Hidden size of the Transformer. + num_hidden_layers: int. Number of layers (blocks) in the Transformer. + num_attention_heads: int. Number of attention heads in the Transformer. + intermediate_size: int. The size of the "intermediate" (a.k.a., feed + forward) layer. + intermediate_act_fn: function. The non-linear activation function to apply + to the output of the intermediate/feed-forward layer. + hidden_dropout_prob: float. Dropout probability for the hidden layers. + attention_probs_dropout_prob: float. Dropout probability of the attention + probabilities. + initializer_range: float. Range of the initializer (stddev of truncated + normal). + do_return_all_layers: Whether to also return all layers or just the final + layer. + + Returns: + float Tensor of shape [batch_size, seq_length, hidden_size], the final + hidden layer of the Transformer. + + Raises: + ValueError: A Tensor shape or parameter is invalid. 
+ """ + if hidden_size % num_attention_heads != 0: + raise ValueError( + "The hidden size (%d) is not a multiple of the number of attention " + "heads (%d)" % (hidden_size, num_attention_heads)) + + attention_head_size = int(hidden_size / num_attention_heads) + input_shape = get_shape_list(input_tensor, expected_rank=3) + batch_size = input_shape[0] + seq_length = input_shape[1] + input_width = input_shape[2] + + # The Transformer performs sum residuals on all layers so the input needs + # to be the same as the hidden size. + if input_width != hidden_size: + raise ValueError("The width of the input tensor (%d) != hidden size (%d)" % + (input_width, hidden_size)) + + # We keep the representation as a 2D tensor to avoid re-shaping it back and + # forth from a 3D tensor to a 2D tensor. Re-shapes are normally free on + # the GPU/CPU but may not be free on the TPU, so we want to minimize them to + # help the optimizer. + prev_output = reshape_to_matrix(input_tensor) + + all_layer_outputs = [] + for layer_idx in range(num_hidden_layers): + with tf.compat.v1.variable_scope("layer_%d" % layer_idx): + layer_input = prev_output + + with tf.compat.v1.variable_scope("attention"): + attention_heads = [] + with tf.compat.v1.variable_scope("self"): + attention_head = attention_layer( + from_tensor=layer_input, + to_tensor=layer_input, + attention_mask=attention_mask, + num_attention_heads=num_attention_heads, + size_per_head=attention_head_size, + attention_probs_dropout_prob=attention_probs_dropout_prob, + initializer_range=initializer_range, + do_return_2d_tensor=True, + batch_size=batch_size, + from_seq_length=seq_length, + to_seq_length=seq_length) + attention_heads.append(attention_head) + + attention_output = None + if len(attention_heads) == 1: + attention_output = attention_heads[0] + else: + # In the case where we have other sequences, we just concatenate + # them to the self-attention head before the projection. + attention_output = tf.concat(attention_heads, axis=-1) + + # Run a linear projection of `hidden_size` then add a residual + # with `layer_input`. + with tf.compat.v1.variable_scope("output"): + attention_output = tf.compat.v1.layers.dense( + attention_output, + hidden_size, + kernel_initializer=create_initializer(initializer_range)) + attention_output = dropout(attention_output, hidden_dropout_prob) + attention_output = layer_norm(attention_output + layer_input) + + # The activation is only applied to the "intermediate" hidden layer. + with tf.compat.v1.variable_scope("intermediate"): + intermediate_output = tf.compat.v1.layers.dense( + attention_output, + intermediate_size, + activation=intermediate_act_fn, + kernel_initializer=create_initializer(initializer_range)) + + # Down-project back to `hidden_size` then add the residual. 
+ with tf.compat.v1.variable_scope("output"): + layer_output = tf.compat.v1.layers.dense( + intermediate_output, + hidden_size, + kernel_initializer=create_initializer(initializer_range)) + layer_output = dropout(layer_output, hidden_dropout_prob) + layer_output = layer_norm(layer_output + attention_output) + prev_output = layer_output + all_layer_outputs.append(layer_output) + + if do_return_all_layers: + final_outputs = [] + for layer_output in all_layer_outputs: + final_output = reshape_from_matrix(layer_output, input_shape) + final_outputs.append(final_output) + return final_outputs + else: + final_output = reshape_from_matrix(prev_output, input_shape) + return final_output + + +def get_shape_list(tensor, expected_rank=None, name=None): + """Returns a list of the shape of tensor, preferring static dimensions. + + Args: + tensor: A tf.Tensor object to find the shape of. + expected_rank: (optional) int. The expected rank of `tensor`. If this is + specified and the `tensor` has a different rank, and exception will be + thrown. + name: Optional name of the tensor for the error message. + + Returns: + A list of dimensions of the shape of tensor. All static dimensions will + be returned as python integers, and dynamic dimensions will be returned + as tf.Tensor scalars. + """ + if name is None: + name = tensor.name + + if expected_rank is not None: + assert_rank(tensor, expected_rank, name) + + shape = tensor.shape.as_list() + + non_static_indexes = [] + for (index, dim) in enumerate(shape): + if dim is None: + non_static_indexes.append(index) + + if not non_static_indexes: + return shape + + dyn_shape = tf.shape(input=tensor) + for index in non_static_indexes: + shape[index] = dyn_shape[index] + return shape + + +def reshape_to_matrix(input_tensor): + """Reshapes a >= rank 2 tensor to a rank 2 tensor (i.e., a matrix).""" + ndims = input_tensor.shape.ndims + if ndims < 2: + raise ValueError("Input tensor must have at least rank 2. Shape = %s" % + (input_tensor.shape)) + if ndims == 2: + return input_tensor + + width = input_tensor.shape[-1] + output_tensor = tf.reshape(input_tensor, [-1, width]) + return output_tensor + + +def reshape_from_matrix(output_tensor, orig_shape_list): + """Reshapes a rank 2 tensor back to its original rank >= 2 tensor.""" + if len(orig_shape_list) == 2: + return output_tensor + + output_shape = get_shape_list(output_tensor) + + orig_dims = orig_shape_list[0:-1] + width = output_shape[-1] + + return tf.reshape(output_tensor, orig_dims + [width]) + + +def assert_rank(tensor, expected_rank, name=None): + """Raises an exception if the tensor rank is not of the expected rank. + + Args: + tensor: A tf.Tensor to check the rank of. + expected_rank: Python integer or list of integers, expected rank. + name: Optional name of the tensor for the error message. + + Raises: + ValueError: If the expected shape doesn't match the actual shape. 
+ """ + if name is None: + name = tensor.name + + expected_rank_dict = {} + if isinstance(expected_rank, six.integer_types): + expected_rank_dict[expected_rank] = True + else: + for x in expected_rank: + expected_rank_dict[x] = True + + actual_rank = tensor.shape.ndims + if actual_rank not in expected_rank_dict: + scope_name = tf.compat.v1.get_variable_scope().name + raise ValueError( + "For the tensor `%s` in scope `%s`, the actual rank " + "`%d` (shape = %s) is not equal to the expected rank `%s`" % + (name, scope_name, actual_rank, str(tensor.shape), str(expected_rank))) diff --git a/exp-4-bert-squad/modeling_test.py b/exp-4-bert-squad/modeling_test.py new file mode 100644 index 0000000..92ed2cb --- /dev/null +++ b/exp-4-bert-squad/modeling_test.py @@ -0,0 +1,277 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import json +import random +import re + +import modeling +import six +import tensorflow as tf + + +class BertModelTest(tf.test.TestCase): + + class BertModelTester(object): + + def __init__(self, + parent, + batch_size=13, + seq_length=7, + is_training=True, + use_input_mask=True, + use_token_type_ids=True, + vocab_size=99, + hidden_size=32, + num_hidden_layers=5, + num_attention_heads=4, + intermediate_size=37, + hidden_act="gelu", + hidden_dropout_prob=0.1, + attention_probs_dropout_prob=0.1, + max_position_embeddings=512, + type_vocab_size=16, + initializer_range=0.02, + scope=None): + self.parent = parent + self.batch_size = batch_size + self.seq_length = seq_length + self.is_training = is_training + self.use_input_mask = use_input_mask + self.use_token_type_ids = use_token_type_ids + self.vocab_size = vocab_size + self.hidden_size = hidden_size + self.num_hidden_layers = num_hidden_layers + self.num_attention_heads = num_attention_heads + self.intermediate_size = intermediate_size + self.hidden_act = hidden_act + self.hidden_dropout_prob = hidden_dropout_prob + self.attention_probs_dropout_prob = attention_probs_dropout_prob + self.max_position_embeddings = max_position_embeddings + self.type_vocab_size = type_vocab_size + self.initializer_range = initializer_range + self.scope = scope + + def create_model(self): + input_ids = BertModelTest.ids_tensor([self.batch_size, self.seq_length], + self.vocab_size) + + input_mask = None + if self.use_input_mask: + input_mask = BertModelTest.ids_tensor( + [self.batch_size, self.seq_length], vocab_size=2) + + token_type_ids = None + if self.use_token_type_ids: + token_type_ids = BertModelTest.ids_tensor( + [self.batch_size, self.seq_length], self.type_vocab_size) + + config = modeling.BertConfig( + vocab_size=self.vocab_size, + hidden_size=self.hidden_size, + num_hidden_layers=self.num_hidden_layers, + num_attention_heads=self.num_attention_heads, + intermediate_size=self.intermediate_size, + hidden_act=self.hidden_act, + 
hidden_dropout_prob=self.hidden_dropout_prob, + attention_probs_dropout_prob=self.attention_probs_dropout_prob, + max_position_embeddings=self.max_position_embeddings, + type_vocab_size=self.type_vocab_size, + initializer_range=self.initializer_range) + + model = modeling.BertModel( + config=config, + is_training=self.is_training, + input_ids=input_ids, + input_mask=input_mask, + token_type_ids=token_type_ids, + scope=self.scope) + + outputs = { + "embedding_output": model.get_embedding_output(), + "sequence_output": model.get_sequence_output(), + "pooled_output": model.get_pooled_output(), + "all_encoder_layers": model.get_all_encoder_layers(), + } + return outputs + + def check_output(self, result): + self.parent.assertAllEqual( + result["embedding_output"].shape, + [self.batch_size, self.seq_length, self.hidden_size]) + + self.parent.assertAllEqual( + result["sequence_output"].shape, + [self.batch_size, self.seq_length, self.hidden_size]) + + self.parent.assertAllEqual(result["pooled_output"].shape, + [self.batch_size, self.hidden_size]) + + def test_default(self): + self.run_tester(BertModelTest.BertModelTester(self)) + + def test_config_to_json_string(self): + config = modeling.BertConfig(vocab_size=99, hidden_size=37) + obj = json.loads(config.to_json_string()) + self.assertEqual(obj["vocab_size"], 99) + self.assertEqual(obj["hidden_size"], 37) + + def run_tester(self, tester): + with self.test_session() as sess: + ops = tester.create_model() + init_op = tf.group(tf.compat.v1.global_variables_initializer(), + tf.compat.v1.local_variables_initializer()) + sess.run(init_op) + output_result = sess.run(ops) + tester.check_output(output_result) + + self.assert_all_tensors_reachable(sess, [init_op, ops]) + + @classmethod + def ids_tensor(cls, shape, vocab_size, rng=None, name=None): + """Creates a random int32 tensor of the shape within the vocab size.""" + if rng is None: + rng = random.Random() + + total_dims = 1 + for dim in shape: + total_dims *= dim + + values = [] + for _ in range(total_dims): + values.append(rng.randint(0, vocab_size - 1)) + + return tf.constant(value=values, dtype=tf.int32, shape=shape, name=name) + + def assert_all_tensors_reachable(self, sess, outputs): + """Checks that all the tensors in the graph are reachable from outputs.""" + graph = sess.graph + + ignore_strings = [ + "^.*/assert_less_equal/.*$", + "^.*/dilation_rate$", + "^.*/Tensordot/concat$", + "^.*/Tensordot/concat/axis$", + "^testing/.*$", + ] + + ignore_regexes = [re.compile(x) for x in ignore_strings] + + unreachable = self.get_unreachable_ops(graph, outputs) + filtered_unreachable = [] + for x in unreachable: + do_ignore = False + for r in ignore_regexes: + m = r.match(x.name) + if m is not None: + do_ignore = True + if do_ignore: + continue + filtered_unreachable.append(x) + unreachable = filtered_unreachable + + self.assertEqual( + len(unreachable), 0, "The following ops are unreachable: %s" % + (" ".join([x.name for x in unreachable]))) + + @classmethod + def get_unreachable_ops(cls, graph, outputs): + """Finds all of the tensors in graph that are unreachable from outputs.""" + outputs = cls.flatten_recursive(outputs) + output_to_op = collections.defaultdict(list) + op_to_all = collections.defaultdict(list) + assign_out_to_in = collections.defaultdict(list) + + for op in graph.get_operations(): + for x in op.inputs: + op_to_all[op.name].append(x.name) + for y in op.outputs: + output_to_op[y.name].append(op.name) + op_to_all[op.name].append(y.name) + if str(op.type) == "Assign": + for y in 
op.outputs: + for x in op.inputs: + assign_out_to_in[y.name].append(x.name) + + assign_groups = collections.defaultdict(list) + for out_name in assign_out_to_in.keys(): + name_group = assign_out_to_in[out_name] + for n1 in name_group: + assign_groups[n1].append(out_name) + for n2 in name_group: + if n1 != n2: + assign_groups[n1].append(n2) + + seen_tensors = {} + stack = [x.name for x in outputs] + while stack: + name = stack.pop() + if name in seen_tensors: + continue + seen_tensors[name] = True + + if name in output_to_op: + for op_name in output_to_op[name]: + if op_name in op_to_all: + for input_name in op_to_all[op_name]: + if input_name not in stack: + stack.append(input_name) + + expanded_names = [] + if name in assign_groups: + for assign_name in assign_groups[name]: + expanded_names.append(assign_name) + + for expanded_name in expanded_names: + if expanded_name not in stack: + stack.append(expanded_name) + + unreachable_ops = [] + for op in graph.get_operations(): + is_unreachable = False + all_names = [x.name for x in op.inputs] + [x.name for x in op.outputs] + for name in all_names: + if name not in seen_tensors: + is_unreachable = True + if is_unreachable: + unreachable_ops.append(op) + return unreachable_ops + + @classmethod + def flatten_recursive(cls, item): + """Flattens (potentially nested) a tuple/dictionary/list to a list.""" + output = [] + if isinstance(item, list): + output.extend(item) + elif isinstance(item, tuple): + output.extend(list(item)) + elif isinstance(item, dict): + for (_, v) in six.iteritems(item): + output.append(v) + else: + return [item] + + flat_output = [] + for x in output: + flat_output.extend(cls.flatten_recursive(x)) + return flat_output + + +if __name__ == "__main__": + tf.test.main() diff --git a/exp-4-bert-squad/old-files-bert/README.md b/exp-4-bert-squad/old-files-bert/README.md new file mode 100644 index 0000000..2639ef9 --- /dev/null +++ b/exp-4-bert-squad/old-files-bert/README.md @@ -0,0 +1,171 @@ +# SQuAD 1.1 energy consumption + +## Download model +https://github.com/google-research/bert +*Model specification:* **BERT-Base, Uncased** + +## Download data +https://github.com/google-research/bert +- train-v1.1.json +- dev-v1.1.json +- evaluate-v1.1.py + +## Environment preparation +``` +export BERT_BASE_DIR=/path/to/bert/downloaded_model +export SQUAD_DIR=/path/to/downloaded/data +``` +## Installation +To install: +``` +pip install requirements.txt +``` + +## Tensorflow estimator replacement +Replace **estimator.py** and **tpu_estimator.py** files in created virtual environment directly. 
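+
+The exact destination depends on where the virtual environment lives; one way
+to print the installed `tensorflow_estimator` package directory before copying
+the files listed below is, for example:
+```
+python -c "import os, tensorflow_estimator; print(os.path.dirname(tensorflow_estimator.__file__))"
+```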
+ +Updated tensorflow estimator files: +``` +tf_updated_files/estimator.py +tf_updated_files/tpu_estimator.py +``` + +Files to replace: +``` +/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/estimator.py +``` + +``` +venv/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu/tpu_estimator.py +``` + +## Modification to Carbon Tracker package + +The line +``` +devices = [name.decode("utf-8") for name in names] +``` +is replaced with +``` +devices = [name for name in names] +``` +in the following file: /home/demouser/miniconda3/envs/cenv_tf/lib/python3.7/site-packages/carbontracker/components/gpu/nvidia.py + +## Usage +**Programming language:** +Developed and tested using python 3.7.16 + +**RAPL usage:** +``` +sudo chmod o+r /sys/class/powercap/intel-rapl\:0/energy_uj +``` + +**Create output directory:** +``` +mkdir output +mkdir output/squad_base +mkdir output/calculator_output +``` + +## Start fine-tuning BERT +``` +python run_squad.py \ + --vocab_file=$BERT_BASE_DIR/vocab.txt \ + --bert_config_file=$BERT_BASE_DIR/bert_config.json \ + --init_checkpoint=$BERT_BASE_DIR/bert_model.ckpt \ + --do_train=True \ + --train_file=$SQUAD_DIR/train-v1.1.json \ + --do_predict=True \ + --predict_file=$SQUAD_DIR/dev-v1.1.json \ + --train_batch_size=12 \ + --learning_rate=3e-5 \ + --num_train_epochs=2.0 \ + --max_seq_length=384 \ + --doc_stride=128 \ + --output_dir=/output/squad_base/ \ + --calculator= +``` + +#### *Supported calculators* +- code_carbon +- carbon_tracker +- eco2ai +- impact_tracker + + + +## Usage on the Alienware: + +``` +export BERT_BASE_DIR=/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12 +export SQUAD_DIR=/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/data +export OUTPUT_ESTIMATOR=./output/calculator_output +``` + +Example without calculator: +``` +python run_squad.py --vocab_file=$BERT_BASE_DIR/vocab.txt --bert_config_file=$BERT_BASE_DIR/bert_config.json --init_checkpoint=$BERT_BASE_DIR/bert_model.ckpt --do_train=True --train_file=$SQUAD_DIR/train-extracted.json --do_predict=False --predict_file=$SQUAD_DIR/dev-v1.1.json --train_batch_size=12 --learning_rate=3e-5 --num_train_epochs=3.0 --max_seq_length=384 --doc_stride=128 --output_dir=output/squad_base/ +``` + +``` +python run_squad.py --vocab_file=$BERT_BASE_DIR/vocab.txt --bert_config_file=$BERT_BASE_DIR/bert_config.json --init_checkpoint=$BERT_BASE_DIR/bert_model.ckpt --do_train=True --train_file=$SQUAD_DIR/train-extracted.json --do_predict=False --predict_file=$SQUAD_DIR/dev-v1.1.json --train_batch_size=12 --learning_rate=3e-5 --num_train_epochs=3.0 --doc_stride=128 --output_dir=output/squad_base/ +``` + + +Training with whole dataset 3 epochs: +``` +python nlp_bert_squad/run_squad.py --vocab_file=$BERT_BASE_DIR/vocab.txt --bert_config_file=$BERT_BASE_DIR/bert_config.json --init_checkpoint=$BERT_BASE_DIR/bert_model.ckpt --do_predict=True --do_train=True --train_file=$SQUAD_DIR/train-v1.1.json --do_predict=False --predict_file=$SQUAD_DIR/dev-v1.1.json --train_batch_size=3 --learning_rate=3e-5 --num_train_epochs=3.0 --doc_stride=128 --output_dir=nlp_bert_squad/output/squad_base/ +``` + +Training with smaller dataset 1 epoch: +python nlp_bert_squad/run_squad.py --vocab_file=$BERT_BASE_DIR/vocab.txt --bert_config_file=$BERT_BASE_DIR/bert_config.json --init_checkpoint=$BERT_BASE_DIR/bert_model.ckpt --do_predict=True --do_train=True --train_file=$SQUAD_DIR/train-extracted.json --do_predict=False --predict_file=$SQUAD_DIR/dev-v1.1.json 
--train_batch_size=12 --learning_rate=3e-5 --num_train_epochs=1.0 --doc_stride=128 --output_dir=nlp_bert_squad/output/squad_base/ + +Inference with smaller dataset (Super_Bowl_50 paragraph): +python nlp_bert_squad/run_squad.py --vocab_file=$BERT_BASE_DIR/vocab.txt --bert_config_file=$BERT_BASE_DIR/bert_config.json --init_checkpoint=$BERT_BASE_DIR/bert_model.ckpt --do_predict=True --do_train=False --train_file=$SQUAD_DIR/train-extracted.json --do_predict=True --predict_file=$SQUAD_DIR/dev-v1.1.json --train_batch_size=3 --learning_rate=3e-5 --num_train_epochs=1.0 --doc_stride=128 --output_dir=nlp_bert_squad/output/squad_base/ + + + +Example with a calculator: + +``` +python run_squad.py --vocab_file=$BERT_BASE_DIR/vocab.txt --bert_config_file=$BERT_BASE_DIR/bert_config.json --init_checkpoint=$BERT_BASE_DIR/bert_model.ckpt --do_train=True --train_file=$SQUAD_DIR/train-extracted.json --do_predict=False --predict_file=$SQUAD_DIR/dev-v1.1.json --train_batch_size=12 --learning_rate=3e-5 --num_train_epochs=3.0 --max_seq_length=384 --doc_stride=128 --output_dir=output/squad_base/ --calculator=carbon_tracker +``` + + + + + +## Create the environment: + +source: https://www.tensorflow.org/install/pip + +``` +conda create -n cenv_tf python=3.7 + +conda activate cenv_tf + +conda install -c conda-forge cudatoolkit=11.8.0 + +pip install nvidia-cudnn-cu11 + +pip install tensorflow + +mkdir -p $CONDA_PREFIX/etc/conda/activate.d + +echo 'CUDNN_PATH=$(dirname $(python -c "import nvidia.cudnn;print(nvidia.cudnn.__file__)"))' >> $CONDA_PREFIX/etc/conda/activate.d/env_vars.sh + +echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CONDA_PREFIX/lib/:$CUDNN_PATH/lib' >> $CONDA_PREFIX/etc/conda/activate.d/env_vars.sh + +source $CONDA_PREFIX/etc/conda/activate.d/env_vars.sh +``` + +Verify install: +``` +python3 -c "import tensorflow as tf; print(tf.config.list_physical_devices('GPU'))" +``` + +Change the estimator.py and estimator-tpu.py files at locations: + +- cenv_tf/lib/python3.7/site-packages/tensorflow_estimator/python/estimator + +- cenv_tf/lib/python3.7/site-packages/tensorflow_estimator/python/estimator/tpu \ No newline at end of file diff --git a/exp-4-bert-squad/old-files-bert/run_squad-OLD.py b/exp-4-bert-squad/old-files-bert/run_squad-OLD.py new file mode 100644 index 0000000..e342799 --- /dev/null +++ b/exp-4-bert-squad/old-files-bert/run_squad-OLD.py @@ -0,0 +1,1352 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
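# NOTE: this script is the upstream Google BERT run_squad.py extended with an
# optional --calculator flag, which wraps training and prediction with one of
# the energy trackers imported below (code_carbon, carbon_tracker, eco2ai,
# impact_tracker); see get_calculator() further down.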
+"""Run BERT on SQuAD 1.1 and SQuAD 2.0.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import json +import math +import os +import random +import modeling +import optimization +import tokenization +import six +import tensorflow as tf +import tf_slim as slim + +from codecarbon import EmissionsTracker +from carbontracker.tracker import CarbonTracker +from carbontracker import parser as CTparser +import eco2ai +from experiment_impact_tracker.compute_tracker import ImpactTracker + + +flags = tf.compat.v1.flags + +FLAGS = flags.FLAGS + +## Required parameters +flags.DEFINE_string( + "bert_config_file", None, + "The config json file corresponding to the pre-trained BERT model. " + "This specifies the model architecture.") + +flags.DEFINE_string("vocab_file", None, + "The vocabulary file that the BERT model was trained on.") + +flags.DEFINE_string( + "output_dir", None, + "The output directory where the model checkpoints will be written.") + +## Other parameters +flags.DEFINE_string("calculator", None, + "calculator to meassure energy usage (Use carbon_tracker or code_carbon or eco2ai or impact_tracker)") + +flags.DEFINE_string("train_file", None, + "SQuAD json for training. E.g., train-v1.1.json") + +flags.DEFINE_string( + "predict_file", None, + "SQuAD json for predictions. E.g., dev-v1.1.json or test-v1.1.json") + +flags.DEFINE_string( + "init_checkpoint", None, + "Initial checkpoint (usually from a pre-trained BERT model).") + +flags.DEFINE_bool( + "do_lower_case", True, + "Whether to lower case the input text. Should be True for uncased " + "models and False for cased models.") + +flags.DEFINE_integer( + "max_seq_length", 384, + "The maximum total input sequence length after WordPiece tokenization. " + "Sequences longer than this will be truncated, and sequences shorter " + "than this will be padded.") + +flags.DEFINE_integer( + "doc_stride", 128, + "When splitting up a long document into chunks, how much stride to " + "take between chunks.") + +flags.DEFINE_integer( + "max_query_length", 64, + "The maximum number of tokens for the question. Questions longer than " + "this will be truncated to this length.") + +flags.DEFINE_bool("do_train", False, "Whether to run training.") + +flags.DEFINE_bool("do_predict", False, "Whether to run eval on the dev set.") + +flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.") + +flags.DEFINE_integer("predict_batch_size", 8, + "Total batch size for predictions.") + +flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.") + +flags.DEFINE_float("num_train_epochs", 3.0, + "Total number of training epochs to perform.") + +flags.DEFINE_float( + "warmup_proportion", 0.1, + "Proportion of training to perform linear learning rate warmup for. " + "E.g., 0.1 = 10% of training.") + +flags.DEFINE_integer("save_checkpoints_steps", 1000, + "How often to save the model checkpoint.") + +flags.DEFINE_integer("iterations_per_loop", 1000, + "How many steps to make in each estimator call.") + +flags.DEFINE_integer( + "n_best_size", 20, + "The total number of n-best predictions to generate in the " + "nbest_predictions.json output file.") + +flags.DEFINE_integer( + "max_answer_length", 30, + "The maximum length of an answer that can be generated. 
This is needed " + "because the start and end predictions are not conditioned on one another.") + +flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.") + +tf.compat.v1.flags.DEFINE_string( + "tpu_name", None, + "The Cloud TPU to use for training. This should be either the name " + "used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 " + "url.") + +tf.compat.v1.flags.DEFINE_string( + "tpu_zone", None, + "[Optional] GCE zone where the Cloud TPU is located in. If not " + "specified, we will attempt to automatically detect the GCE project from " + "metadata.") + +tf.compat.v1.flags.DEFINE_string( + "gcp_project", None, + "[Optional] Project name for the Cloud TPU-enabled project. If not " + "specified, we will attempt to automatically detect the GCE project from " + "metadata.") + +tf.compat.v1.flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.") + +flags.DEFINE_integer( + "num_tpu_cores", 8, + "Only used if `use_tpu` is True. Total number of TPU cores to use.") + +flags.DEFINE_bool( + "verbose_logging", False, + "If true, all of the warnings related to data processing will be printed. " + "A number of warnings are expected for a normal SQuAD evaluation.") + +flags.DEFINE_bool( + "version_2_with_negative", False, + "If true, the SQuAD examples contain some that do not have an answer.") + +flags.DEFINE_float( + "null_score_diff_threshold", 0.0, + "If null_score - best_non_null is greater than the threshold predict null.") + + +class SquadExample(object): + """A single training/test example for simple sequence classification. + + For examples without an answer, the start and end position are -1. + """ + + def __init__(self, + qas_id, + question_text, + doc_tokens, + orig_answer_text=None, + start_position=None, + end_position=None, + is_impossible=False): + self.qas_id = qas_id + self.question_text = question_text + self.doc_tokens = doc_tokens + self.orig_answer_text = orig_answer_text + self.start_position = start_position + self.end_position = end_position + self.is_impossible = is_impossible + + def __str__(self): + return self.__repr__() + + def __repr__(self): + s = "" + s += "qas_id: %s" % (tokenization.printable_text(self.qas_id)) + s += ", question_text: %s" % ( + tokenization.printable_text(self.question_text)) + s += ", doc_tokens: [%s]" % (" ".join(self.doc_tokens)) + if self.start_position: + s += ", start_position: %d" % (self.start_position) + if self.start_position: + s += ", end_position: %d" % (self.end_position) + if self.start_position: + s += ", is_impossible: %r" % (self.is_impossible) + return s + + +class InputFeatures(object): + """A single set of features of data.""" + + def __init__(self, + unique_id, + example_index, + doc_span_index, + tokens, + token_to_orig_map, + token_is_max_context, + input_ids, + input_mask, + segment_ids, + start_position=None, + end_position=None, + is_impossible=None): + self.unique_id = unique_id + self.example_index = example_index + self.doc_span_index = doc_span_index + self.tokens = tokens + self.token_to_orig_map = token_to_orig_map + self.token_is_max_context = token_is_max_context + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.start_position = start_position + self.end_position = end_position + self.is_impossible = is_impossible + + +def read_squad_examples(input_file, is_training): + """Read a SQuAD json file into a list of SquadExample.""" + with tf.io.gfile.GFile(input_file, "r") as reader: + input_data = 
json.load(reader)["data"] + + def is_whitespace(c): + if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F: + return True + return False + + examples = [] + for entry in input_data: + for paragraph in entry["paragraphs"]: + paragraph_text = paragraph["context"] + doc_tokens = [] + char_to_word_offset = [] + prev_is_whitespace = True + for c in paragraph_text: + if is_whitespace(c): + prev_is_whitespace = True + else: + if prev_is_whitespace: + doc_tokens.append(c) + else: + doc_tokens[-1] += c + prev_is_whitespace = False + char_to_word_offset.append(len(doc_tokens) - 1) + + for qa in paragraph["qas"]: + qas_id = qa["id"] + question_text = qa["question"] + start_position = None + end_position = None + orig_answer_text = None + is_impossible = False + if is_training: + + if FLAGS.version_2_with_negative: + is_impossible = qa["is_impossible"] + if (len(qa["answers"]) != 1) and (not is_impossible): + raise ValueError( + "For training, each question should have exactly 1 answer.") + if not is_impossible: + answer = qa["answers"][0] + orig_answer_text = answer["text"] + answer_offset = answer["answer_start"] + answer_length = len(orig_answer_text) + start_position = char_to_word_offset[answer_offset] + end_position = char_to_word_offset[answer_offset + answer_length - + 1] + # Only add answers where the text can be exactly recovered from the + # document. If this CAN'T happen it's likely due to weird Unicode + # stuff so we will just skip the example. + # + # Note that this means for training mode, every example is NOT + # guaranteed to be preserved. + actual_text = " ".join( + doc_tokens[start_position:(end_position + 1)]) + cleaned_answer_text = " ".join( + tokenization.whitespace_tokenize(orig_answer_text)) + if actual_text.find(cleaned_answer_text) == -1: + tf.compat.v1.logging.warning("Could not find answer: '%s' vs. 
'%s'", + actual_text, cleaned_answer_text) + continue + else: + start_position = -1 + end_position = -1 + orig_answer_text = "" + + example = SquadExample( + qas_id=qas_id, + question_text=question_text, + doc_tokens=doc_tokens, + orig_answer_text=orig_answer_text, + start_position=start_position, + end_position=end_position, + is_impossible=is_impossible) + examples.append(example) + + return examples + + +def convert_examples_to_features(examples, tokenizer, max_seq_length, + doc_stride, max_query_length, is_training, + output_fn): + """Loads a data file into a list of `InputBatch`s.""" + + unique_id = 1000000000 + + for (example_index, example) in enumerate(examples): + query_tokens = tokenizer.tokenize(example.question_text) + + if len(query_tokens) > max_query_length: + query_tokens = query_tokens[0:max_query_length] + + tok_to_orig_index = [] + orig_to_tok_index = [] + all_doc_tokens = [] + for (i, token) in enumerate(example.doc_tokens): + orig_to_tok_index.append(len(all_doc_tokens)) + sub_tokens = tokenizer.tokenize(token) + for sub_token in sub_tokens: + tok_to_orig_index.append(i) + all_doc_tokens.append(sub_token) + + tok_start_position = None + tok_end_position = None + if is_training and example.is_impossible: + tok_start_position = -1 + tok_end_position = -1 + if is_training and not example.is_impossible: + tok_start_position = orig_to_tok_index[example.start_position] + if example.end_position < len(example.doc_tokens) - 1: + tok_end_position = orig_to_tok_index[example.end_position + 1] - 1 + else: + tok_end_position = len(all_doc_tokens) - 1 + (tok_start_position, tok_end_position) = _improve_answer_span( + all_doc_tokens, tok_start_position, tok_end_position, tokenizer, + example.orig_answer_text) + + # The -3 accounts for [CLS], [SEP] and [SEP] + max_tokens_for_doc = max_seq_length - len(query_tokens) - 3 + + # We can have documents that are longer than the maximum sequence length. + # To deal with this we do a sliding window approach, where we take chunks + # of the up to our max length with a stride of `doc_stride`. + _DocSpan = collections.namedtuple( # pylint: disable=invalid-name + "DocSpan", ["start", "length"]) + doc_spans = [] + start_offset = 0 + while start_offset < len(all_doc_tokens): + length = len(all_doc_tokens) - start_offset + if length > max_tokens_for_doc: + length = max_tokens_for_doc + doc_spans.append(_DocSpan(start=start_offset, length=length)) + if start_offset + length == len(all_doc_tokens): + break + start_offset += min(length, doc_stride) + + for (doc_span_index, doc_span) in enumerate(doc_spans): + tokens = [] + token_to_orig_map = {} + token_is_max_context = {} + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in query_tokens: + tokens.append(token) + segment_ids.append(0) + tokens.append("[SEP]") + segment_ids.append(0) + + for i in range(doc_span.length): + split_token_index = doc_span.start + i + token_to_orig_map[len(tokens)] = tok_to_orig_index[split_token_index] + + is_max_context = _check_is_max_context(doc_spans, doc_span_index, + split_token_index) + token_is_max_context[len(tokens)] = is_max_context + tokens.append(all_doc_tokens[split_token_index]) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + + # The mask has 1 for real tokens and 0 for padding tokens. Only real + # tokens are attended to. + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. 
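      # input_ids, input_mask and segment_ids are padded in lockstep, so every
      # feature ends up with exactly max_seq_length entries and can later be
      # parsed with tf.io.FixedLenFeature([seq_length], tf.int64) in
      # input_fn_builder.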
+ while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + + start_position = None + end_position = None + if is_training and not example.is_impossible: + # For training, if our document chunk does not contain an annotation + # we throw it out, since there is nothing to predict. + doc_start = doc_span.start + doc_end = doc_span.start + doc_span.length - 1 + out_of_span = False + if not (tok_start_position >= doc_start and + tok_end_position <= doc_end): + out_of_span = True + if out_of_span: + start_position = 0 + end_position = 0 + else: + doc_offset = len(query_tokens) + 2 + start_position = tok_start_position - doc_start + doc_offset + end_position = tok_end_position - doc_start + doc_offset + + if is_training and example.is_impossible: + start_position = 0 + end_position = 0 + + if example_index < 20: + tf.compat.v1.logging.info("*** Example ***") + tf.compat.v1.logging.info("unique_id: %s" % (unique_id)) + tf.compat.v1.logging.info("example_index: %s" % (example_index)) + tf.compat.v1.logging.info("doc_span_index: %s" % (doc_span_index)) + tf.compat.v1.logging.info("tokens: %s" % " ".join( + [tokenization.printable_text(x) for x in tokens])) + tf.compat.v1.logging.info("token_to_orig_map: %s" % " ".join( + ["%d:%d" % (x, y) for (x, y) in six.iteritems(token_to_orig_map)])) + tf.compat.v1.logging.info("token_is_max_context: %s" % " ".join([ + "%d:%s" % (x, y) for (x, y) in six.iteritems(token_is_max_context) + ])) + tf.compat.v1.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + tf.compat.v1.logging.info( + "input_mask: %s" % " ".join([str(x) for x in input_mask])) + tf.compat.v1.logging.info( + "segment_ids: %s" % " ".join([str(x) for x in segment_ids])) + if is_training and example.is_impossible: + tf.compat.v1.logging.info("impossible example") + if is_training and not example.is_impossible: + answer_text = " ".join(tokens[start_position:(end_position + 1)]) + tf.compat.v1.logging.info("start_position: %d" % (start_position)) + tf.compat.v1.logging.info("end_position: %d" % (end_position)) + tf.compat.v1.logging.info( + "answer: %s" % (tokenization.printable_text(answer_text))) + + feature = InputFeatures( + unique_id=unique_id, + example_index=example_index, + doc_span_index=doc_span_index, + tokens=tokens, + token_to_orig_map=token_to_orig_map, + token_is_max_context=token_is_max_context, + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + start_position=start_position, + end_position=end_position, + is_impossible=example.is_impossible) + + # Run callback + output_fn(feature) + + unique_id += 1 + + +def _improve_answer_span(doc_tokens, input_start, input_end, tokenizer, + orig_answer_text): + """Returns tokenized answer spans that better match the annotated answer.""" + + # The SQuAD annotations are character based. We first project them to + # whitespace-tokenized words. But then after WordPiece tokenization, we can + # often find a "better match". For example: + # + # Question: What year was John Smith born? + # Context: The leader was John Smith (1895-1943). + # Answer: 1895 + # + # The original whitespace-tokenized answer will be "(1895-1943).". However + # after tokenization, our tokens will be "( 1895 - 1943 ) .". So we can match + # the exact answer, 1895. + # + # However, this is not always possible. 
Consider the following: + # + # Question: What country is the top exporter of electornics? + # Context: The Japanese electronics industry is the lagest in the world. + # Answer: Japan + # + # In this case, the annotator chose "Japan" as a character sub-span of + # the word "Japanese". Since our WordPiece tokenizer does not split + # "Japanese", we just use "Japanese" as the annotation. This is fairly rare + # in SQuAD, but does happen. + tok_answer_text = " ".join(tokenizer.tokenize(orig_answer_text)) + + for new_start in range(input_start, input_end + 1): + for new_end in range(input_end, new_start - 1, -1): + text_span = " ".join(doc_tokens[new_start:(new_end + 1)]) + if text_span == tok_answer_text: + return (new_start, new_end) + + return (input_start, input_end) + + +def _check_is_max_context(doc_spans, cur_span_index, position): + """Check if this is the 'max context' doc span for the token.""" + + # Because of the sliding window approach taken to scoring documents, a single + # token can appear in multiple documents. E.g. + # Doc: the man went to the store and bought a gallon of milk + # Span A: the man went to the + # Span B: to the store and bought + # Span C: and bought a gallon of + # ... + # + # Now the word 'bought' will have two scores from spans B and C. We only + # want to consider the score with "maximum context", which we define as + # the *minimum* of its left and right context (the *sum* of left and + # right context will always be the same, of course). + # + # In the example the maximum context for 'bought' would be span C since + # it has 1 left context and 3 right context, while span B has 4 left context + # and 0 right context. + best_score = None + best_span_index = None + for (span_index, doc_span) in enumerate(doc_spans): + end = doc_span.start + doc_span.length - 1 + if position < doc_span.start: + continue + if position > end: + continue + num_left_context = position - doc_span.start + num_right_context = end - position + score = min(num_left_context, num_right_context) + 0.01 * doc_span.length + if best_score is None or score > best_score: + best_score = score + best_span_index = span_index + + return cur_span_index == best_span_index + + +def create_model(bert_config, is_training, input_ids, input_mask, segment_ids, + use_one_hot_embeddings): + """Creates a classification model.""" + model = modeling.BertModel( + config=bert_config, + is_training=is_training, + input_ids=input_ids, + input_mask=input_mask, + token_type_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings) + + final_hidden = model.get_sequence_output() + + final_hidden_shape = modeling.get_shape_list(final_hidden, expected_rank=3) + batch_size = final_hidden_shape[0] + seq_length = final_hidden_shape[1] + hidden_size = final_hidden_shape[2] + + output_weights = tf.compat.v1.get_variable( + "cls/squad/output_weights", [2, hidden_size], + initializer=tf.compat.v1.truncated_normal_initializer(stddev=0.02)) + + output_bias = tf.compat.v1.get_variable( + "cls/squad/output_bias", [2], initializer=tf.compat.v1.zeros_initializer()) + + final_hidden_matrix = tf.reshape(final_hidden, + [batch_size * seq_length, hidden_size]) + logits = tf.matmul(final_hidden_matrix, output_weights, transpose_b=True) + logits = tf.nn.bias_add(logits, output_bias) + + logits = tf.reshape(logits, [batch_size, seq_length, 2]) + logits = tf.transpose(a=logits, perm=[2, 0, 1]) + + unstacked_logits = tf.unstack(logits, axis=0) + + (start_logits, end_logits) = (unstacked_logits[0], unstacked_logits[1]) + + return 
(start_logits, end_logits) + + +def model_fn_builder(bert_config, init_checkpoint, learning_rate, + num_train_steps, num_warmup_steps, use_tpu, + use_one_hot_embeddings): + """Returns `model_fn` closure for TPUEstimator.""" + + def model_fn(features, labels, mode, params): # pylint: disable=unused-argument + """The `model_fn` for TPUEstimator.""" + + tf.compat.v1.logging.info("*** Features ***") + for name in sorted(features.keys()): + tf.compat.v1.logging.info(" name = %s, shape = %s" % (name, features[name].shape)) + + unique_ids = features["unique_ids"] + input_ids = features["input_ids"] + input_mask = features["input_mask"] + segment_ids = features["segment_ids"] + + is_training = (mode == tf.estimator.ModeKeys.TRAIN) + + (start_logits, end_logits) = create_model( + bert_config=bert_config, + is_training=is_training, + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings) + + tvars = tf.compat.v1.trainable_variables() + + initialized_variable_names = {} + scaffold_fn = None + if init_checkpoint: + (assignment_map, initialized_variable_names + ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) + if use_tpu: + + def tpu_scaffold(): + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + return tf.compat.v1.train.Scaffold() + + scaffold_fn = tpu_scaffold + else: + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + + tf.compat.v1.logging.info("**** Trainable Variables ****") + for var in tvars: + init_string = "" + if var.name in initialized_variable_names: + init_string = ", *INIT_FROM_CKPT*" + tf.compat.v1.logging.info(" name = %s, shape = %s%s", var.name, var.shape, + init_string) + + output_spec = None + if mode == tf.estimator.ModeKeys.TRAIN: + seq_length = modeling.get_shape_list(input_ids)[1] + + def compute_loss(logits, positions): + one_hot_positions = tf.one_hot( + positions, depth=seq_length, dtype=tf.float32) + log_probs = tf.nn.log_softmax(logits, axis=-1) + loss = -tf.reduce_mean( + input_tensor=tf.reduce_sum(input_tensor=one_hot_positions * log_probs, axis=-1)) + return loss + + start_positions = features["start_positions"] + end_positions = features["end_positions"] + + start_loss = compute_loss(start_logits, start_positions) + end_loss = compute_loss(end_logits, end_positions) + + total_loss = (start_loss + end_loss) / 2.0 + + train_op = optimization.create_optimizer( + total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu) + + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + train_op=train_op, + scaffold_fn=scaffold_fn) + elif mode == tf.estimator.ModeKeys.PREDICT: + predictions = { + "unique_ids": unique_ids, + "start_logits": start_logits, + "end_logits": end_logits, + } + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, predictions=predictions, scaffold_fn=scaffold_fn) + else: + raise ValueError( + "Only TRAIN and PREDICT modes are supported: %s" % (mode)) + + return output_spec + + return model_fn + + +def input_fn_builder(input_file, seq_length, is_training, drop_remainder): + """Creates an `input_fn` closure to be passed to TPUEstimator.""" + + name_to_features = { + "unique_ids": tf.io.FixedLenFeature([], tf.int64), + "input_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + "input_mask": tf.io.FixedLenFeature([seq_length], tf.int64), + "segment_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + } + + if is_training: + 
name_to_features["start_positions"] = tf.io.FixedLenFeature([], tf.int64) + name_to_features["end_positions"] = tf.io.FixedLenFeature([], tf.int64) + + def _decode_record(record, name_to_features): + """Decodes a record to a TensorFlow example.""" + example = tf.io.parse_single_example(serialized=record, features=name_to_features) + + # tf.Example only supports tf.int64, but the TPU only supports tf.int32. + # So cast all int64 to int32. + for name in list(example.keys()): + t = example[name] + if t.dtype == tf.int64: + t = tf.cast(t, dtype=tf.int32) + example[name] = t + + return example + + def input_fn(params): + """The actual input function.""" + batch_size = params["batch_size"] + + # For training, we want a lot of parallel reading and shuffling. + # For eval, we want no shuffling and parallel reading doesn't matter. + d = tf.data.TFRecordDataset(input_file) + if is_training: + d = d.repeat() + d = d.shuffle(buffer_size=100) + + d = d.apply( + tf.data.experimental.map_and_batch( + lambda record: _decode_record(record, name_to_features), + batch_size=batch_size, + drop_remainder=drop_remainder)) + + return d + + return input_fn + + +RawResult = collections.namedtuple("RawResult", + ["unique_id", "start_logits", "end_logits"]) + + +def write_predictions(all_examples, all_features, all_results, n_best_size, + max_answer_length, do_lower_case, output_prediction_file, + output_nbest_file, output_null_log_odds_file): + """Write final predictions to the json file and log-odds of null if needed.""" + tf.compat.v1.logging.info("Writing predictions to: %s" % (output_prediction_file)) + tf.compat.v1.logging.info("Writing nbest to: %s" % (output_nbest_file)) + + example_index_to_features = collections.defaultdict(list) + for feature in all_features: + example_index_to_features[feature.example_index].append(feature) + + unique_id_to_result = {} + for result in all_results: + unique_id_to_result[result.unique_id] = result + + _PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name + "PrelimPrediction", + ["feature_index", "start_index", "end_index", "start_logit", "end_logit"]) + + all_predictions = collections.OrderedDict() + all_nbest_json = collections.OrderedDict() + scores_diff_json = collections.OrderedDict() + + for (example_index, example) in enumerate(all_examples): + features = example_index_to_features[example_index] + + prelim_predictions = [] + # keep track of the minimum score of null start+end of position 0 + score_null = 1000000 # large and positive + min_null_feature_index = 0 # the paragraph slice with min mull score + null_start_logit = 0 # the start logit at the slice with min null score + null_end_logit = 0 # the end logit at the slice with min null score + for (feature_index, feature) in enumerate(features): + result = unique_id_to_result[feature.unique_id] + start_indexes = _get_best_indexes(result.start_logits, n_best_size) + end_indexes = _get_best_indexes(result.end_logits, n_best_size) + # if we could have irrelevant answers, get the min score of irrelevant + if FLAGS.version_2_with_negative: + feature_null_score = result.start_logits[0] + result.end_logits[0] + if feature_null_score < score_null: + score_null = feature_null_score + min_null_feature_index = feature_index + null_start_logit = result.start_logits[0] + null_end_logit = result.end_logits[0] + for start_index in start_indexes: + for end_index in end_indexes: + # We could hypothetically create invalid predictions, e.g., predict + # that the start of the span is in the question. 
We throw out all + # invalid predictions. + if start_index >= len(feature.tokens): + continue + if end_index >= len(feature.tokens): + continue + if start_index not in feature.token_to_orig_map: + continue + if end_index not in feature.token_to_orig_map: + continue + if not feature.token_is_max_context.get(start_index, False): + continue + if end_index < start_index: + continue + length = end_index - start_index + 1 + if length > max_answer_length: + continue + prelim_predictions.append( + _PrelimPrediction( + feature_index=feature_index, + start_index=start_index, + end_index=end_index, + start_logit=result.start_logits[start_index], + end_logit=result.end_logits[end_index])) + + if FLAGS.version_2_with_negative: + prelim_predictions.append( + _PrelimPrediction( + feature_index=min_null_feature_index, + start_index=0, + end_index=0, + start_logit=null_start_logit, + end_logit=null_end_logit)) + prelim_predictions = sorted( + prelim_predictions, + key=lambda x: (x.start_logit + x.end_logit), + reverse=True) + + _NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name + "NbestPrediction", ["text", "start_logit", "end_logit"]) + + seen_predictions = {} + nbest = [] + for pred in prelim_predictions: + if len(nbest) >= n_best_size: + break + feature = features[pred.feature_index] + if pred.start_index > 0: # this is a non-null prediction + tok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)] + orig_doc_start = feature.token_to_orig_map[pred.start_index] + orig_doc_end = feature.token_to_orig_map[pred.end_index] + orig_tokens = example.doc_tokens[orig_doc_start:(orig_doc_end + 1)] + tok_text = " ".join(tok_tokens) + + # De-tokenize WordPieces that have been split off. + tok_text = tok_text.replace(" ##", "") + tok_text = tok_text.replace("##", "") + + # Clean whitespace + tok_text = tok_text.strip() + tok_text = " ".join(tok_text.split()) + orig_text = " ".join(orig_tokens) + + final_text = get_final_text(tok_text, orig_text, do_lower_case) + if final_text in seen_predictions: + continue + + seen_predictions[final_text] = True + else: + final_text = "" + seen_predictions[final_text] = True + + nbest.append( + _NbestPrediction( + text=final_text, + start_logit=pred.start_logit, + end_logit=pred.end_logit)) + + # if we didn't inlude the empty option in the n-best, inlcude it + if FLAGS.version_2_with_negative: + if "" not in seen_predictions: + nbest.append( + _NbestPrediction( + text="", start_logit=null_start_logit, + end_logit=null_end_logit)) + # In very rare edge cases we could have no valid predictions. So we + # just create a nonce prediction in this case to avoid failure. 
+ if not nbest: + nbest.append( + _NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0)) + + assert len(nbest) >= 1 + + total_scores = [] + best_non_null_entry = None + for entry in nbest: + total_scores.append(entry.start_logit + entry.end_logit) + if not best_non_null_entry: + if entry.text: + best_non_null_entry = entry + + probs = _compute_softmax(total_scores) + + nbest_json = [] + for (i, entry) in enumerate(nbest): + output = collections.OrderedDict() + output["text"] = entry.text + output["probability"] = probs[i] + output["start_logit"] = entry.start_logit + output["end_logit"] = entry.end_logit + nbest_json.append(output) + + assert len(nbest_json) >= 1 + + if not FLAGS.version_2_with_negative: + all_predictions[example.qas_id] = nbest_json[0]["text"] + else: + # predict "" iff the null score - the score of best non-null > threshold + score_diff = score_null - best_non_null_entry.start_logit - ( + best_non_null_entry.end_logit) + scores_diff_json[example.qas_id] = score_diff + if score_diff > FLAGS.null_score_diff_threshold: + all_predictions[example.qas_id] = "" + else: + all_predictions[example.qas_id] = best_non_null_entry.text + + all_nbest_json[example.qas_id] = nbest_json + + with tf.io.gfile.GFile(output_prediction_file, "w") as writer: + writer.write(json.dumps(all_predictions, indent=4) + "\n") + + with tf.io.gfile.GFile(output_nbest_file, "w") as writer: + writer.write(json.dumps(all_nbest_json, indent=4) + "\n") + + if FLAGS.version_2_with_negative: + with tf.io.gfile.GFile(output_null_log_odds_file, "w") as writer: + writer.write(json.dumps(scores_diff_json, indent=4) + "\n") + + +def get_final_text(pred_text, orig_text, do_lower_case): + """Project the tokenized prediction back to the original text.""" + + # When we created the data, we kept track of the alignment between original + # (whitespace tokenized) tokens and our WordPiece tokenized tokens. So + # now `orig_text` contains the span of our original text corresponding to the + # span that we predicted. + # + # However, `orig_text` may contain extra characters that we don't want in + # our prediction. + # + # For example, let's say: + # pred_text = steve smith + # orig_text = Steve Smith's + # + # We don't want to return `orig_text` because it contains the extra "'s". + # + # We don't want to return `pred_text` because it's already been normalized + # (the SQuAD eval script also does punctuation stripping/lower casing but + # our tokenizer does additional normalization like stripping accent + # characters). + # + # What we really want to return is "Steve Smith". + # + # Therefore, we have to apply a semi-complicated alignment heruistic between + # `pred_text` and `orig_text` to get a character-to-charcter alignment. This + # can fail in certain cases in which case we just return `orig_text`. + + def _strip_spaces(text): + ns_chars = [] + ns_to_s_map = collections.OrderedDict() + for (i, c) in enumerate(text): + if c == " ": + continue + ns_to_s_map[len(ns_chars)] = i + ns_chars.append(c) + ns_text = "".join(ns_chars) + return (ns_text, ns_to_s_map) + + # We first tokenize `orig_text`, strip whitespace from the result + # and `pred_text`, and check if they are the same length. If they are + # NOT the same length, the heuristic has failed. If they are the same + # length, we assume the characters are one-to-one aligned. 
+ tokenizer = tokenization.BasicTokenizer(do_lower_case=do_lower_case) + + tok_text = " ".join(tokenizer.tokenize(orig_text)) + + start_position = tok_text.find(pred_text) + if start_position == -1: + if FLAGS.verbose_logging: + tf.compat.v1.logging.info( + "Unable to find text: '%s' in '%s'" % (pred_text, orig_text)) + return orig_text + end_position = start_position + len(pred_text) - 1 + + (orig_ns_text, orig_ns_to_s_map) = _strip_spaces(orig_text) + (tok_ns_text, tok_ns_to_s_map) = _strip_spaces(tok_text) + + if len(orig_ns_text) != len(tok_ns_text): + if FLAGS.verbose_logging: + tf.compat.v1.logging.info("Length not equal after stripping spaces: '%s' vs '%s'", + orig_ns_text, tok_ns_text) + return orig_text + + # We then project the characters in `pred_text` back to `orig_text` using + # the character-to-character alignment. + tok_s_to_ns_map = {} + for (i, tok_index) in six.iteritems(tok_ns_to_s_map): + tok_s_to_ns_map[tok_index] = i + + orig_start_position = None + if start_position in tok_s_to_ns_map: + ns_start_position = tok_s_to_ns_map[start_position] + if ns_start_position in orig_ns_to_s_map: + orig_start_position = orig_ns_to_s_map[ns_start_position] + + if orig_start_position is None: + if FLAGS.verbose_logging: + tf.compat.v1.logging.info("Couldn't map start position") + return orig_text + + orig_end_position = None + if end_position in tok_s_to_ns_map: + ns_end_position = tok_s_to_ns_map[end_position] + if ns_end_position in orig_ns_to_s_map: + orig_end_position = orig_ns_to_s_map[ns_end_position] + + if orig_end_position is None: + if FLAGS.verbose_logging: + tf.compat.v1.logging.info("Couldn't map end position") + return orig_text + + output_text = orig_text[orig_start_position:(orig_end_position + 1)] + return output_text + + +def _get_best_indexes(logits, n_best_size): + """Get the n-best logits from a list.""" + index_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True) + + best_indexes = [] + for i in range(len(index_and_score)): + if i >= n_best_size: + break + best_indexes.append(index_and_score[i][0]) + return best_indexes + + +def _compute_softmax(scores): + """Compute softmax probability over raw logits.""" + if not scores: + return [] + + max_score = None + for score in scores: + if max_score is None or score > max_score: + max_score = score + + exp_scores = [] + total_sum = 0.0 + for score in scores: + x = math.exp(score - max_score) + exp_scores.append(x) + total_sum += x + + probs = [] + for score in exp_scores: + probs.append(score / total_sum) + return probs + + +class FeatureWriter(object): + """Writes InputFeature to TF example file.""" + + def __init__(self, filename, is_training): + self.filename = filename + self.is_training = is_training + self.num_features = 0 + self._writer = tf.io.TFRecordWriter(filename) + + def process_feature(self, feature): + """Write a InputFeature to the TFRecordWriter as a tf.train.Example.""" + self.num_features += 1 + + def create_int_feature(values): + feature = tf.train.Feature( + int64_list=tf.train.Int64List(value=list(values))) + return feature + + features = collections.OrderedDict() + features["unique_ids"] = create_int_feature([feature.unique_id]) + features["input_ids"] = create_int_feature(feature.input_ids) + features["input_mask"] = create_int_feature(feature.input_mask) + features["segment_ids"] = create_int_feature(feature.segment_ids) + + if self.is_training: + features["start_positions"] = create_int_feature([feature.start_position]) + features["end_positions"] = 
create_int_feature([feature.end_position]) + impossible = 0 + if feature.is_impossible: + impossible = 1 + features["is_impossible"] = create_int_feature([impossible]) + + tf_example = tf.train.Example(features=tf.train.Features(feature=features)) + self._writer.write(tf_example.SerializeToString()) + + def close(self): + self._writer.close() + + +def validate_flags_or_throw(bert_config): + """Validate the input FLAGS or throw an exception.""" + tokenization.validate_case_matches_checkpoint(FLAGS.do_lower_case, + FLAGS.init_checkpoint) + + if not FLAGS.do_train and not FLAGS.do_predict: + raise ValueError("At least one of `do_train` or `do_predict` must be True.") + + if FLAGS.do_train: + if not FLAGS.train_file: + raise ValueError( + "If `do_train` is True, then `train_file` must be specified.") + if FLAGS.do_predict: + if not FLAGS.predict_file: + raise ValueError( + "If `do_predict` is True, then `predict_file` must be specified.") + + if FLAGS.max_seq_length > bert_config.max_position_embeddings: + raise ValueError( + "Cannot use sequence length %d because the BERT model " + "was only trained up to sequence length %d" % + (FLAGS.max_seq_length, bert_config.max_position_embeddings)) + + if FLAGS.max_seq_length <= FLAGS.max_query_length + 3: + raise ValueError( + "The max_seq_length (%d) must be greater than max_query_length " + "(%d) + 3" % (FLAGS.max_seq_length, FLAGS.max_query_length)) + + +def get_calculator(epochs: int, calculator: str = None): + if calculator is None: + print("No calculator is about to be used during the training or inference.") + specific_calculator = None + elif calculator == "code_carbon": + output_file = "output/calculator_output/output_cc_resnet.csv" + specific_calculator = EmissionsTracker(output_file = output_file) + elif calculator == "carbon_tracker": + update_interval = 0.01 + monitor_epochs = -1 + epochs_before_pred = 0 + decimal_precision = 10 + + specific_calculator = CarbonTracker(epochs=epochs, + update_interval = update_interval, + log_dir = "output/calculator_output/", + monitor_epochs = monitor_epochs, + epochs_before_pred = epochs_before_pred, + decimal_precision = decimal_precision) + elif calculator == "eco2ai": + output_file = "output/calculator_output/output_eco2ai.csv" + specific_calculator = eco2ai.Tracker(file_name=output_file, alpha_2_code="FR") + + elif calculator == "impact_tracker": + output_file = "output/calculator_output/" + specific_calculator = ImpactTracker(output_file) + + return specific_calculator + + +def main(_): + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + + bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file) + + validate_flags_or_throw(bert_config) + + tf.io.gfile.makedirs(FLAGS.output_dir) + + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + + tpu_cluster_resolver = None + if FLAGS.use_tpu and FLAGS.tpu_name: + tpu_cluster_resolver = slim.cluster_resolver.TPUClusterResolver( + FLAGS.tpu_name, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project) + + # is_per_host = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V2 + is_per_host = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V2 + run_config = tf.compat.v1.estimator.tpu.RunConfig( + cluster=tpu_cluster_resolver, + master=FLAGS.master, + model_dir=FLAGS.output_dir, + save_checkpoints_steps=FLAGS.save_checkpoints_steps, + tpu_config=tf.compat.v1.estimator.tpu.TPUConfig( + iterations_per_loop=FLAGS.iterations_per_loop, + num_shards=FLAGS.num_tpu_cores, + 
per_host_input_for_training=is_per_host)) + + train_examples = None + num_train_steps = None + num_warmup_steps = None + if FLAGS.do_train: + train_examples = read_squad_examples( + input_file=FLAGS.train_file, is_training=True) + num_train_steps = int( + len(train_examples) / FLAGS.train_batch_size * FLAGS.num_train_epochs) + num_warmup_steps = int(num_train_steps * FLAGS.warmup_proportion) + + # Pre-shuffle the input to avoid having to make a very large shuffle + # buffer in in the `input_fn`. + rng = random.Random(12345) + rng.shuffle(train_examples) + + model_fn = model_fn_builder( + bert_config=bert_config, + init_checkpoint=FLAGS.init_checkpoint, + learning_rate=FLAGS.learning_rate, + num_train_steps=num_train_steps, + num_warmup_steps=num_warmup_steps, + use_tpu=FLAGS.use_tpu, + use_one_hot_embeddings=FLAGS.use_tpu) + + # If TPU is not available, this will fall back to normal Estimator on CPU + # or GPU. + estimator = tf.compat.v1.estimator.tpu.TPUEstimator( + use_tpu=FLAGS.use_tpu, + model_fn=model_fn, + config=run_config, + train_batch_size=FLAGS.train_batch_size, + predict_batch_size=FLAGS.predict_batch_size) + + if FLAGS.do_train: + # We write to a temporary file to avoid storing very large constant tensors + # in memory. + train_writer = FeatureWriter( + filename=os.path.join(FLAGS.output_dir, "train.tf_record"), + is_training=True) + convert_examples_to_features( + examples=train_examples, + tokenizer=tokenizer, + max_seq_length=FLAGS.max_seq_length, + doc_stride=FLAGS.doc_stride, + max_query_length=FLAGS.max_query_length, + is_training=True, + output_fn=train_writer.process_feature) + train_writer.close() + + tf.compat.v1.logging.info("***** Running training *****") + tf.compat.v1.logging.info(" Num orig examples = %d", len(train_examples)) + tf.compat.v1.logging.info(" Num split examples = %d", train_writer.num_features) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.train_batch_size) + tf.compat.v1.logging.info(" Num steps = %d", num_train_steps) + del train_examples + + train_input_fn = input_fn_builder( + input_file=train_writer.filename, + seq_length=FLAGS.max_seq_length, + is_training=True, + drop_remainder=True) + + tracker = get_calculator(FLAGS.num_train_epochs, FLAGS.calculator) + + if FLAGS.calculator != "carbon_tracker" and FLAGS.calculator != None and FLAGS.calculator != "impact_tracker": + tracker.start() + if FLAGS.calculator == "impact_tracker": + tracker.launch_impact_monitor() + if FLAGS.calculator == "carbon_tracker": + tracker.epoch_start() + + estimator.train(input_fn=train_input_fn, max_steps=num_train_steps) + + if FLAGS.calculator == "carbon_tracker": + tracker.epoch_end() + if FLAGS.calculator != None and FLAGS.calculator != "impact_tracker"and FLAGS.calculator != "carbon_tracker": + tracker.stop() + + if FLAGS.do_predict: + eval_examples = read_squad_examples( + input_file=FLAGS.predict_file, is_training=False) + + eval_writer = FeatureWriter( + filename=os.path.join(FLAGS.output_dir, "eval.tf_record"), + is_training=False) + eval_features = [] + + def append_feature(feature): + eval_features.append(feature) + eval_writer.process_feature(feature) + + convert_examples_to_features( + examples=eval_examples, + tokenizer=tokenizer, + max_seq_length=FLAGS.max_seq_length, + doc_stride=FLAGS.doc_stride, + max_query_length=FLAGS.max_query_length, + is_training=False, + output_fn=append_feature) + eval_writer.close() + + tf.compat.v1.logging.info("***** Running predictions *****") + tf.compat.v1.logging.info(" Num orig examples = %d", 
len(eval_examples)) + tf.compat.v1.logging.info(" Num split examples = %d", len(eval_features)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.predict_batch_size) + + all_results = [] + + predict_input_fn = input_fn_builder( + input_file=eval_writer.filename, + seq_length=FLAGS.max_seq_length, + is_training=False, + drop_remainder=False) + + # If running eval on the TPU, you will need to specify the number of + # steps. + all_results = [] + if FLAGS.calculator != "carbon_tracker" and FLAGS.calculator != None and FLAGS.calculator != "impact_tracker": + tracker.start() + if FLAGS.calculator == "impact_tracker": + tracker.launch_impact_monitor() + if FLAGS.calculator == "carbon_tracker": + tracker.epoch_start() + + for result in estimator.predict( + predict_input_fn, yield_single_examples=True): + if len(all_results) % 1000 == 0: + tf.compat.v1.logging.info("Processing example: %d" % (len(all_results))) + unique_id = int(result["unique_ids"]) + start_logits = [float(x) for x in result["start_logits"].flat] + end_logits = [float(x) for x in result["end_logits"].flat] + all_results.append( + RawResult( + unique_id=unique_id, + start_logits=start_logits, + end_logits=end_logits)) + + if FLAGS.calculator == "carbon_tracker": + tracker.epoch_end() + if FLAGS.calculator != None and FLAGS.calculator != "impact_tracker"and FLAGS.calculator != "carbon_tracker": + tracker.stop() + + output_prediction_file = os.path.join(FLAGS.output_dir, "predictions.json") + output_nbest_file = os.path.join(FLAGS.output_dir, "nbest_predictions.json") + output_null_log_odds_file = os.path.join(FLAGS.output_dir, "null_odds.json") + + write_predictions(eval_examples, eval_features, all_results, + FLAGS.n_best_size, FLAGS.max_answer_length, + FLAGS.do_lower_case, output_prediction_file, + output_nbest_file, output_null_log_odds_file) + + +if __name__ == "__main__": + flags.mark_flag_as_required("vocab_file") + flags.mark_flag_as_required("bert_config_file") + flags.mark_flag_as_required("output_dir") + tf.compat.v1.app.run() diff --git a/exp-4-bert-squad/optimization.py b/exp-4-bert-squad/optimization.py new file mode 100644 index 0000000..b1692e7 --- /dev/null +++ b/exp-4-bert-squad/optimization.py @@ -0,0 +1,174 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Functions and classes related to optimization (weight updates).""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import re +import tensorflow as tf + + +def create_optimizer(loss, init_lr, num_train_steps, num_warmup_steps, use_tpu): + """Creates an optimizer training op.""" + global_step = tf.compat.v1.train.get_or_create_global_step() + + learning_rate = tf.constant(value=init_lr, shape=[], dtype=tf.float32) + + # Implements linear decay of the learning rate. 
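  # With end_learning_rate=0.0 and power=1.0 this reduces to a straight line
  # from init_lr down to 0:
  #   lr(step) = init_lr * (1 - step / num_train_steps)   for step <= num_train_steps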
+ learning_rate = tf.compat.v1.train.polynomial_decay( + learning_rate, + global_step, + num_train_steps, + end_learning_rate=0.0, + power=1.0, + cycle=False) + + # Implements linear warmup. I.e., if global_step < num_warmup_steps, the + # learning rate will be `global_step/num_warmup_steps * init_lr`. + if num_warmup_steps: + global_steps_int = tf.cast(global_step, tf.int32) + warmup_steps_int = tf.constant(num_warmup_steps, dtype=tf.int32) + + global_steps_float = tf.cast(global_steps_int, tf.float32) + warmup_steps_float = tf.cast(warmup_steps_int, tf.float32) + + warmup_percent_done = global_steps_float / warmup_steps_float + warmup_learning_rate = init_lr * warmup_percent_done + + is_warmup = tf.cast(global_steps_int < warmup_steps_int, tf.float32) + learning_rate = ( + (1.0 - is_warmup) * learning_rate + is_warmup * warmup_learning_rate) + + # It is recommended that you use this optimizer for fine tuning, since this + # is how the model was trained (note that the Adam m/v variables are NOT + # loaded from init_checkpoint.) + optimizer = AdamWeightDecayOptimizer( + learning_rate=learning_rate, + weight_decay_rate=0.01, + beta_1=0.9, + beta_2=0.999, + epsilon=1e-6, + exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"]) + + if use_tpu: + optimizer = tf.compat.v1.tpu.CrossShardOptimizer(optimizer) + + tvars = tf.compat.v1.trainable_variables() + grads = tf.gradients(ys=loss, xs=tvars) + + # This is how the model was pre-trained. + (grads, _) = tf.clip_by_global_norm(grads, clip_norm=1.0) + + train_op = optimizer.apply_gradients( + zip(grads, tvars), global_step=global_step) + + # Normally the global step update is done inside of `apply_gradients`. + # However, `AdamWeightDecayOptimizer` doesn't do this. But if you use + # a different optimizer, you should probably take this line out. + new_global_step = global_step + 1 + train_op = tf.group(train_op, [global_step.assign(new_global_step)]) + return train_op + + +class AdamWeightDecayOptimizer(tf.compat.v1.train.Optimizer): + """A basic Adam optimizer that includes "correct" L2 weight decay.""" + + def __init__(self, + learning_rate, + weight_decay_rate=0.0, + beta_1=0.9, + beta_2=0.999, + epsilon=1e-6, + exclude_from_weight_decay=None, + name="AdamWeightDecayOptimizer"): + """Constructs a AdamWeightDecayOptimizer.""" + super(AdamWeightDecayOptimizer, self).__init__(False, name) + + self.learning_rate = learning_rate + self.weight_decay_rate = weight_decay_rate + self.beta_1 = beta_1 + self.beta_2 = beta_2 + self.epsilon = epsilon + self.exclude_from_weight_decay = exclude_from_weight_decay + + def apply_gradients(self, grads_and_vars, global_step=None, name=None): + """See base class.""" + assignments = [] + for (grad, param) in grads_and_vars: + if grad is None or param is None: + continue + + param_name = self._get_variable_name(param.name) + + m = tf.compat.v1.get_variable( + name=param_name + "/adam_m", + shape=param.shape.as_list(), + dtype=tf.float32, + trainable=False, + initializer=tf.compat.v1.zeros_initializer()) + v = tf.compat.v1.get_variable( + name=param_name + "/adam_v", + shape=param.shape.as_list(), + dtype=tf.float32, + trainable=False, + initializer=tf.compat.v1.zeros_initializer()) + + # Standard Adam update. 
+ next_m = ( + tf.multiply(self.beta_1, m) + tf.multiply(1.0 - self.beta_1, grad)) + next_v = ( + tf.multiply(self.beta_2, v) + tf.multiply(1.0 - self.beta_2, + tf.square(grad))) + + update = next_m / (tf.sqrt(next_v) + self.epsilon) + + # Just adding the square of the weights to the loss function is *not* + # the correct way of using L2 regularization/weight decay with Adam, + # since that will interact with the m and v parameters in strange ways. + # + # Instead we want ot decay the weights in a manner that doesn't interact + # with the m/v parameters. This is equivalent to adding the square + # of the weights to the loss with plain (non-momentum) SGD. + if self._do_use_weight_decay(param_name): + update += self.weight_decay_rate * param + + update_with_lr = self.learning_rate * update + + next_param = param - update_with_lr + + assignments.extend( + [param.assign(next_param), + m.assign(next_m), + v.assign(next_v)]) + return tf.group(*assignments, name=name) + + def _do_use_weight_decay(self, param_name): + """Whether to use L2 weight decay for `param_name`.""" + if not self.weight_decay_rate: + return False + if self.exclude_from_weight_decay: + for r in self.exclude_from_weight_decay: + if re.search(r, param_name) is not None: + return False + return True + + def _get_variable_name(self, param_name): + """Get the variable name from the tensor name.""" + m = re.match("^(.*):\\d+$", param_name) + if m is not None: + param_name = m.group(1) + return param_name diff --git a/exp-4-bert-squad/optimization_test.py b/exp-4-bert-squad/optimization_test.py new file mode 100644 index 0000000..781f959 --- /dev/null +++ b/exp-4-bert-squad/optimization_test.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import optimization +import tensorflow as tf + + +class OptimizationTest(tf.test.TestCase): + + def test_adam(self): + with self.test_session() as sess: + w = tf.compat.v1.get_variable( + "w", + shape=[3], + initializer=tf.compat.v1.constant_initializer([0.1, -0.2, -0.1])) + x = tf.constant([0.4, 0.2, -0.5]) + loss = tf.reduce_mean(input_tensor=tf.square(x - w)) + tvars = tf.compat.v1.trainable_variables() + grads = tf.gradients(ys=loss, xs=tvars) + global_step = tf.compat.v1.train.get_or_create_global_step() + optimizer = optimization.AdamWeightDecayOptimizer(learning_rate=0.2) + train_op = optimizer.apply_gradients(zip(grads, tvars), global_step) + init_op = tf.group(tf.compat.v1.global_variables_initializer(), + tf.compat.v1.local_variables_initializer()) + sess.run(init_op) + for _ in range(100): + sess.run(train_op) + w_np = sess.run(w) + self.assertAllClose(w_np.flat, [0.4, 0.2, -0.5], rtol=1e-2, atol=1e-2) + + +if __name__ == "__main__": + tf.test.main() diff --git a/exp-4-bert-squad/requirements.txt b/exp-4-bert-squad/requirements.txt new file mode 100644 index 0000000..56a9e37 --- /dev/null +++ b/exp-4-bert-squad/requirements.txt @@ -0,0 +1,6 @@ +tensorflow +tf_slim +carbontracker +codecarbon +eco2ai +experiment_impact_tracker diff --git a/exp-4-bert-squad/run_classifier.py b/exp-4-bert-squad/run_classifier.py new file mode 100644 index 0000000..7c42cb7 --- /dev/null +++ b/exp-4-bert-squad/run_classifier.py @@ -0,0 +1,981 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""BERT finetuning runner.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import csv +import os +import modeling +import optimization +import tokenization +import tensorflow as tf + +flags = tf.flags + +FLAGS = flags.FLAGS + +## Required parameters +flags.DEFINE_string( + "data_dir", None, + "The input data dir. Should contain the .tsv files (or other data files) " + "for the task.") + +flags.DEFINE_string( + "bert_config_file", None, + "The config json file corresponding to the pre-trained BERT model. " + "This specifies the model architecture.") + +flags.DEFINE_string("task_name", None, "The name of the task to train.") + +flags.DEFINE_string("vocab_file", None, + "The vocabulary file that the BERT model was trained on.") + +flags.DEFINE_string( + "output_dir", None, + "The output directory where the model checkpoints will be written.") + +## Other parameters + +flags.DEFINE_string( + "init_checkpoint", None, + "Initial checkpoint (usually from a pre-trained BERT model).") + +flags.DEFINE_bool( + "do_lower_case", True, + "Whether to lower case the input text. 
Should be True for uncased " + "models and False for cased models.") + +flags.DEFINE_integer( + "max_seq_length", 128, + "The maximum total input sequence length after WordPiece tokenization. " + "Sequences longer than this will be truncated, and sequences shorter " + "than this will be padded.") + +flags.DEFINE_bool("do_train", False, "Whether to run training.") + +flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.") + +flags.DEFINE_bool( + "do_predict", False, + "Whether to run the model in inference mode on the test set.") + +flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.") + +flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.") + +flags.DEFINE_integer("predict_batch_size", 8, "Total batch size for predict.") + +flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.") + +flags.DEFINE_float("num_train_epochs", 3.0, + "Total number of training epochs to perform.") + +flags.DEFINE_float( + "warmup_proportion", 0.1, + "Proportion of training to perform linear learning rate warmup for. " + "E.g., 0.1 = 10% of training.") + +flags.DEFINE_integer("save_checkpoints_steps", 1000, + "How often to save the model checkpoint.") + +flags.DEFINE_integer("iterations_per_loop", 1000, + "How many steps to make in each estimator call.") + +flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.") + +tf.flags.DEFINE_string( + "tpu_name", None, + "The Cloud TPU to use for training. This should be either the name " + "used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 " + "url.") + +tf.flags.DEFINE_string( + "tpu_zone", None, + "[Optional] GCE zone where the Cloud TPU is located in. If not " + "specified, we will attempt to automatically detect the GCE project from " + "metadata.") + +tf.flags.DEFINE_string( + "gcp_project", None, + "[Optional] Project name for the Cloud TPU-enabled project. If not " + "specified, we will attempt to automatically detect the GCE project from " + "metadata.") + +tf.flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.") + +flags.DEFINE_integer( + "num_tpu_cores", 8, + "Only used if `use_tpu` is True. Total number of TPU cores to use.") + + +class InputExample(object): + """A single training/test example for simple sequence classification.""" + + def __init__(self, guid, text_a, text_b=None, label=None): + """Constructs a InputExample. + + Args: + guid: Unique id for the example. + text_a: string. The untokenized text of the first sequence. For single + sequence tasks, only this sequence must be specified. + text_b: (Optional) string. The untokenized text of the second sequence. + Only must be specified for sequence pair tasks. + label: (Optional) string. The label of the example. This should be + specified for train and dev examples, but not for test examples. + """ + self.guid = guid + self.text_a = text_a + self.text_b = text_b + self.label = label + + +class PaddingInputExample(object): + """Fake example so the num input examples is a multiple of the batch size. + + When running eval/predict on the TPU, we need to pad the number of examples + to be a multiple of the batch size, because the TPU requires a fixed batch + size. The alternative is to drop the last batch, which is bad because it means + the entire output data won't be generated. + + We use this class instead of `None` because treating `None` as padding + battches could cause silent errors. 
+ """ + + +class InputFeatures(object): + """A single set of features of data.""" + + def __init__(self, + input_ids, + input_mask, + segment_ids, + label_id, + is_real_example=True): + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.label_id = label_id + self.is_real_example = is_real_example + + +class DataProcessor(object): + """Base class for data converters for sequence classification data sets.""" + + def get_train_examples(self, data_dir): + """Gets a collection of `InputExample`s for the train set.""" + raise NotImplementedError() + + def get_dev_examples(self, data_dir): + """Gets a collection of `InputExample`s for the dev set.""" + raise NotImplementedError() + + def get_test_examples(self, data_dir): + """Gets a collection of `InputExample`s for prediction.""" + raise NotImplementedError() + + def get_labels(self): + """Gets the list of labels for this data set.""" + raise NotImplementedError() + + @classmethod + def _read_tsv(cls, input_file, quotechar=None): + """Reads a tab separated value file.""" + with tf.io.gfile.GFile(input_file, "r") as f: + reader = csv.reader(f, delimiter="\t", quotechar=quotechar) + lines = [] + for line in reader: + lines.append(line) + return lines + + +class XnliProcessor(DataProcessor): + """Processor for the XNLI data set.""" + + def __init__(self): + self.language = "zh" + + def get_train_examples(self, data_dir): + """See base class.""" + lines = self._read_tsv( + os.path.join(data_dir, "multinli", + "multinli.train.%s.tsv" % self.language)) + examples = [] + for (i, line) in enumerate(lines): + if i == 0: + continue + guid = "train-%d" % (i) + text_a = tokenization.convert_to_unicode(line[0]) + text_b = tokenization.convert_to_unicode(line[1]) + label = tokenization.convert_to_unicode(line[2]) + if label == tokenization.convert_to_unicode("contradictory"): + label = tokenization.convert_to_unicode("contradiction") + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + lines = self._read_tsv(os.path.join(data_dir, "xnli.dev.tsv")) + examples = [] + for (i, line) in enumerate(lines): + if i == 0: + continue + guid = "dev-%d" % (i) + language = tokenization.convert_to_unicode(line[0]) + if language != tokenization.convert_to_unicode(self.language): + continue + text_a = tokenization.convert_to_unicode(line[6]) + text_b = tokenization.convert_to_unicode(line[7]) + label = tokenization.convert_to_unicode(line[1]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def get_labels(self): + """See base class.""" + return ["contradiction", "entailment", "neutral"] + + +class MnliProcessor(DataProcessor): + """Processor for the MultiNLI data set (GLUE version).""" + + def get_train_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") + + def get_dev_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "dev_matched.tsv")), + "dev_matched") + + def get_test_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "test_matched.tsv")), "test") + + def get_labels(self): + """See base class.""" + return ["contradiction", "entailment", "neutral"] + + def _create_examples(self, lines, set_type): + """Creates 
examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + if i == 0: + continue + guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0])) + text_a = tokenization.convert_to_unicode(line[8]) + text_b = tokenization.convert_to_unicode(line[9]) + if set_type == "test": + label = "contradiction" + else: + label = tokenization.convert_to_unicode(line[-1]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + +class MrpcProcessor(DataProcessor): + """Processor for the MRPC data set (GLUE version).""" + + def get_train_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") + + def get_dev_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev") + + def get_test_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "test.tsv")), "test") + + def get_labels(self): + """See base class.""" + return ["0", "1"] + + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + if i == 0: + continue + guid = "%s-%s" % (set_type, i) + text_a = tokenization.convert_to_unicode(line[3]) + text_b = tokenization.convert_to_unicode(line[4]) + if set_type == "test": + label = "0" + else: + label = tokenization.convert_to_unicode(line[0]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + +class ColaProcessor(DataProcessor): + """Processor for the CoLA data set (GLUE version).""" + + def get_train_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") + + def get_dev_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev") + + def get_test_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "test.tsv")), "test") + + def get_labels(self): + """See base class.""" + return ["0", "1"] + + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + # Only the test set has a header + if set_type == "test" and i == 0: + continue + guid = "%s-%s" % (set_type, i) + if set_type == "test": + text_a = tokenization.convert_to_unicode(line[1]) + label = "0" + else: + text_a = tokenization.convert_to_unicode(line[3]) + label = tokenization.convert_to_unicode(line[1]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=None, label=label)) + return examples + + +def convert_single_example(ex_index, example, label_list, max_seq_length, + tokenizer): + """Converts a single `InputExample` into a single `InputFeatures`.""" + + if isinstance(example, PaddingInputExample): + return InputFeatures( + input_ids=[0] * max_seq_length, + input_mask=[0] * max_seq_length, + segment_ids=[0] * max_seq_length, + label_id=0, + is_real_example=False) + + label_map = {} + for (i, label) in enumerate(label_list): + label_map[label] = i + + tokens_a = tokenizer.tokenize(example.text_a) + tokens_b = None + if example.text_b: + tokens_b = tokenizer.tokenize(example.text_b) + + if tokens_b: + # Modifies 
`tokens_a` and `tokens_b` in place so that the total + # length is less than the specified length. + # Account for [CLS], [SEP], [SEP] with "- 3" + _truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3) + else: + # Account for [CLS] and [SEP] with "- 2" + if len(tokens_a) > max_seq_length - 2: + tokens_a = tokens_a[0:(max_seq_length - 2)] + + # The convention in BERT is: + # (a) For sequence pairs: + # tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP] + # type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1 + # (b) For single sequences: + # tokens: [CLS] the dog is hairy . [SEP] + # type_ids: 0 0 0 0 0 0 0 + # + # Where "type_ids" are used to indicate whether this is the first + # sequence or the second sequence. The embedding vectors for `type=0` and + # `type=1` were learned during pre-training and are added to the wordpiece + # embedding vector (and position vector). This is not *strictly* necessary + # since the [SEP] token unambiguously separates the sequences, but it makes + # it easier for the model to learn the concept of sequences. + # + # For classification tasks, the first vector (corresponding to [CLS]) is + # used as the "sentence vector". Note that this only makes sense because + # the entire model is fine-tuned. + tokens = [] + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in tokens_a: + tokens.append(token) + segment_ids.append(0) + tokens.append("[SEP]") + segment_ids.append(0) + + if tokens_b: + for token in tokens_b: + tokens.append(token) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + + # The mask has 1 for real tokens and 0 for padding tokens. Only real + # tokens are attended to. + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. 
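+  # Illustration (added note, not in the original source): for a single
+  # sentence "hello world" with max_seq_length=8, the padded features are
+  #   tokens:      [CLS] hello world [SEP]
+  #   input_ids:   [id('[CLS]'), id('hello'), id('world'), id('[SEP]'), 0, 0, 0, 0]
+  #   input_mask:  [1, 1, 1, 1, 0, 0, 0, 0]
+  #   segment_ids: [0, 0, 0, 0, 0, 0, 0, 0]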
+ while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + + label_id = label_map[example.label] + if ex_index < 5: + tf.compat.v1.logging.info("*** Example ***") + tf.compat.v1.logging.info("guid: %s" % (example.guid)) + tf.compat.v1.logging.info("tokens: %s" % " ".join( + [tokenization.printable_text(x) for x in tokens])) + tf.compat.v1.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + tf.compat.v1.logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) + tf.compat.v1.logging.info("segment_ids: %s" % " ".join([str(x) for x in segment_ids])) + tf.compat.v1.logging.info("label: %s (id = %d)" % (example.label, label_id)) + + feature = InputFeatures( + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + label_id=label_id, + is_real_example=True) + return feature + + +def file_based_convert_examples_to_features( + examples, label_list, max_seq_length, tokenizer, output_file): + """Convert a set of `InputExample`s to a TFRecord file.""" + + writer = tf.io.TFRecordWriter(output_file) + + for (ex_index, example) in enumerate(examples): + if ex_index % 10000 == 0: + tf.compat.v1.logging.info("Writing example %d of %d" % (ex_index, len(examples))) + + feature = convert_single_example(ex_index, example, label_list, + max_seq_length, tokenizer) + + def create_int_feature(values): + f = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) + return f + + features = collections.OrderedDict() + features["input_ids"] = create_int_feature(feature.input_ids) + features["input_mask"] = create_int_feature(feature.input_mask) + features["segment_ids"] = create_int_feature(feature.segment_ids) + features["label_ids"] = create_int_feature([feature.label_id]) + features["is_real_example"] = create_int_feature( + [int(feature.is_real_example)]) + + tf_example = tf.train.Example(features=tf.train.Features(feature=features)) + writer.write(tf_example.SerializeToString()) + writer.close() + + +def file_based_input_fn_builder(input_file, seq_length, is_training, + drop_remainder): + """Creates an `input_fn` closure to be passed to TPUEstimator.""" + + name_to_features = { + "input_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + "input_mask": tf.io.FixedLenFeature([seq_length], tf.int64), + "segment_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + "label_ids": tf.io.FixedLenFeature([], tf.int64), + "is_real_example": tf.io.FixedLenFeature([], tf.int64), + } + + def _decode_record(record, name_to_features): + """Decodes a record to a TensorFlow example.""" + example = tf.io.parse_single_example(serialized=record, features=name_to_features) + + # tf.Example only supports tf.int64, but the TPU only supports tf.int32. + # So cast all int64 to int32. + for name in list(example.keys()): + t = example[name] + if t.dtype == tf.int64: + t = tf.cast(t, dtype=tf.int32) + example[name] = t + + return example + + def input_fn(params): + """The actual input function.""" + batch_size = params["batch_size"] + + # For training, we want a lot of parallel reading and shuffling. + # For eval, we want no shuffling and parallel reading doesn't matter. 
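+    # Added note (not in the original source): each batch yielded by this
+    # input_fn is a dict of int32 tensors with shapes
+    #   input_ids / input_mask / segment_ids: [batch_size, seq_length]
+    #   label_ids / is_real_example:          [batch_size]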
+ d = tf.data.TFRecordDataset(input_file) + if is_training: + d = d.repeat() + d = d.shuffle(buffer_size=100) + + d = d.apply( + tf.data.experimental.map_and_batch( + lambda record: _decode_record(record, name_to_features), + batch_size=batch_size, + drop_remainder=drop_remainder)) + + return d + + return input_fn + + +def _truncate_seq_pair(tokens_a, tokens_b, max_length): + """Truncates a sequence pair in place to the maximum length.""" + + # This is a simple heuristic which will always truncate the longer sequence + # one token at a time. This makes more sense than truncating an equal percent + # of tokens from each, since if one sequence is very short then each token + # that's truncated likely contains more information than a longer sequence. + while True: + total_length = len(tokens_a) + len(tokens_b) + if total_length <= max_length: + break + if len(tokens_a) > len(tokens_b): + tokens_a.pop() + else: + tokens_b.pop() + + +def create_model(bert_config, is_training, input_ids, input_mask, segment_ids, + labels, num_labels, use_one_hot_embeddings): + """Creates a classification model.""" + model = modeling.BertModel( + config=bert_config, + is_training=is_training, + input_ids=input_ids, + input_mask=input_mask, + token_type_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings) + + # In the demo, we are doing a simple classification task on the entire + # segment. + # + # If you want to use the token-level output, use model.get_sequence_output() + # instead. + output_layer = model.get_pooled_output() + + hidden_size = output_layer.shape[-1].value + + output_weights = tf.compat.v1.get_variable( + "output_weights", [num_labels, hidden_size], + initializer=tf.compat.v1.truncated_normal_initializer(stddev=0.02)) + + output_bias = tf.compat.v1.get_variable( + "output_bias", [num_labels], initializer=tf.compat.v1.zeros_initializer()) + + with tf.compat.v1.variable_scope("loss"): + if is_training: + # I.e., 0.1 dropout + output_layer = tf.nn.dropout(output_layer, rate=1 - (0.9)) + + logits = tf.matmul(output_layer, output_weights, transpose_b=True) + logits = tf.nn.bias_add(logits, output_bias) + probabilities = tf.nn.softmax(logits, axis=-1) + log_probs = tf.nn.log_softmax(logits, axis=-1) + + one_hot_labels = tf.one_hot(labels, depth=num_labels, dtype=tf.float32) + + per_example_loss = -tf.reduce_sum(input_tensor=one_hot_labels * log_probs, axis=-1) + loss = tf.reduce_mean(input_tensor=per_example_loss) + + return (loss, per_example_loss, logits, probabilities) + + +def model_fn_builder(bert_config, num_labels, init_checkpoint, learning_rate, + num_train_steps, num_warmup_steps, use_tpu, + use_one_hot_embeddings): + """Returns `model_fn` closure for TPUEstimator.""" + + def model_fn(features, labels, mode, params): # pylint: disable=unused-argument + """The `model_fn` for TPUEstimator.""" + + tf.compat.v1.logging.info("*** Features ***") + for name in sorted(features.keys()): + tf.compat.v1.logging.info(" name = %s, shape = %s" % (name, features[name].shape)) + + input_ids = features["input_ids"] + input_mask = features["input_mask"] + segment_ids = features["segment_ids"] + label_ids = features["label_ids"] + is_real_example = None + if "is_real_example" in features: + is_real_example = tf.cast(features["is_real_example"], dtype=tf.float32) + else: + is_real_example = tf.ones(tf.shape(input=label_ids), dtype=tf.float32) + + is_training = (mode == tf.estimator.ModeKeys.TRAIN) + + (total_loss, per_example_loss, logits, probabilities) = create_model( + bert_config, is_training, 
input_ids, input_mask, segment_ids, label_ids, + num_labels, use_one_hot_embeddings) + + tvars = tf.compat.v1.trainable_variables() + initialized_variable_names = {} + scaffold_fn = None + if init_checkpoint: + (assignment_map, initialized_variable_names + ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) + if use_tpu: + + def tpu_scaffold(): + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + return tf.compat.v1.train.Scaffold() + + scaffold_fn = tpu_scaffold + else: + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + + tf.compat.v1.logging.info("**** Trainable Variables ****") + for var in tvars: + init_string = "" + if var.name in initialized_variable_names: + init_string = ", *INIT_FROM_CKPT*" + tf.compat.v1.logging.info(" name = %s, shape = %s%s", var.name, var.shape, + init_string) + + output_spec = None + if mode == tf.estimator.ModeKeys.TRAIN: + + train_op = optimization.create_optimizer( + total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu) + + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + train_op=train_op, + scaffold_fn=scaffold_fn) + elif mode == tf.estimator.ModeKeys.EVAL: + + def metric_fn(per_example_loss, label_ids, logits, is_real_example): + predictions = tf.argmax(input=logits, axis=-1, output_type=tf.int32) + accuracy = tf.compat.v1.metrics.accuracy( + labels=label_ids, predictions=predictions, weights=is_real_example) + loss = tf.compat.v1.metrics.mean(values=per_example_loss, weights=is_real_example) + return { + "eval_accuracy": accuracy, + "eval_loss": loss, + } + + eval_metrics = (metric_fn, + [per_example_loss, label_ids, logits, is_real_example]) + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + eval_metrics=eval_metrics, + scaffold_fn=scaffold_fn) + else: + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + predictions={"probabilities": probabilities}, + scaffold_fn=scaffold_fn) + return output_spec + + return model_fn + + +# This function is not used by this file but is still used by the Colab and +# people who depend on it. +def input_fn_builder(features, seq_length, is_training, drop_remainder): + """Creates an `input_fn` closure to be passed to TPUEstimator.""" + + all_input_ids = [] + all_input_mask = [] + all_segment_ids = [] + all_label_ids = [] + + for feature in features: + all_input_ids.append(feature.input_ids) + all_input_mask.append(feature.input_mask) + all_segment_ids.append(feature.segment_ids) + all_label_ids.append(feature.label_id) + + def input_fn(params): + """The actual input function.""" + batch_size = params["batch_size"] + + num_examples = len(features) + + # This is for demo purposes and does NOT scale to large data sets. We do + # not use Dataset.from_generator() because that uses tf.py_func which is + # not TPU compatible. The right way to load data is with TFRecordReader. 
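+    # Added note (not in the original source): from_tensor_slices over
+    # tf.constant(...) embeds every example as a constant in the graph, which
+    # is why this only works for small demo sets; large corpora should go
+    # through the TFRecord-based file_based_input_fn_builder above.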
+ d = tf.data.Dataset.from_tensor_slices({ + "input_ids": + tf.constant( + all_input_ids, shape=[num_examples, seq_length], + dtype=tf.int32), + "input_mask": + tf.constant( + all_input_mask, + shape=[num_examples, seq_length], + dtype=tf.int32), + "segment_ids": + tf.constant( + all_segment_ids, + shape=[num_examples, seq_length], + dtype=tf.int32), + "label_ids": + tf.constant(all_label_ids, shape=[num_examples], dtype=tf.int32), + }) + + if is_training: + d = d.repeat() + d = d.shuffle(buffer_size=100) + + d = d.batch(batch_size=batch_size, drop_remainder=drop_remainder) + return d + + return input_fn + + +# This function is not used by this file but is still used by the Colab and +# people who depend on it. +def convert_examples_to_features(examples, label_list, max_seq_length, + tokenizer): + """Convert a set of `InputExample`s to a list of `InputFeatures`.""" + + features = [] + for (ex_index, example) in enumerate(examples): + if ex_index % 10000 == 0: + tf.compat.v1.logging.info("Writing example %d of %d" % (ex_index, len(examples))) + + feature = convert_single_example(ex_index, example, label_list, + max_seq_length, tokenizer) + + features.append(feature) + return features + + +def main(_): + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + + processors = { + "cola": ColaProcessor, + "mnli": MnliProcessor, + "mrpc": MrpcProcessor, + "xnli": XnliProcessor, + } + + tokenization.validate_case_matches_checkpoint(FLAGS.do_lower_case, + FLAGS.init_checkpoint) + + if not FLAGS.do_train and not FLAGS.do_eval and not FLAGS.do_predict: + raise ValueError( + "At least one of `do_train`, `do_eval` or `do_predict' must be True.") + + bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file) + + if FLAGS.max_seq_length > bert_config.max_position_embeddings: + raise ValueError( + "Cannot use sequence length %d because the BERT model " + "was only trained up to sequence length %d" % + (FLAGS.max_seq_length, bert_config.max_position_embeddings)) + + tf.io.gfile.makedirs(FLAGS.output_dir) + + task_name = FLAGS.task_name.lower() + + if task_name not in processors: + raise ValueError("Task not found: %s" % (task_name)) + + processor = processors[task_name]() + + label_list = processor.get_labels() + + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + + tpu_cluster_resolver = None + if FLAGS.use_tpu and FLAGS.tpu_name: + tpu_cluster_resolver = tf.distribute.cluster_resolver.TPUClusterResolver( + FLAGS.tpu_name, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project) + + is_per_host = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V2 + run_config = tf.compat.v1.estimator.tpu.RunConfig( + cluster=tpu_cluster_resolver, + master=FLAGS.master, + model_dir=FLAGS.output_dir, + save_checkpoints_steps=FLAGS.save_checkpoints_steps, + tpu_config=tf.compat.v1.estimator.tpu.TPUConfig( + iterations_per_loop=FLAGS.iterations_per_loop, + num_shards=FLAGS.num_tpu_cores, + per_host_input_for_training=is_per_host)) + + train_examples = None + num_train_steps = None + num_warmup_steps = None + if FLAGS.do_train: + train_examples = processor.get_train_examples(FLAGS.data_dir) + num_train_steps = int( + len(train_examples) / FLAGS.train_batch_size * FLAGS.num_train_epochs) + num_warmup_steps = int(num_train_steps * FLAGS.warmup_proportion) + + model_fn = model_fn_builder( + bert_config=bert_config, + num_labels=len(label_list), + init_checkpoint=FLAGS.init_checkpoint, + learning_rate=FLAGS.learning_rate, + 
num_train_steps=num_train_steps, + num_warmup_steps=num_warmup_steps, + use_tpu=FLAGS.use_tpu, + use_one_hot_embeddings=FLAGS.use_tpu) + + # If TPU is not available, this will fall back to normal Estimator on CPU + # or GPU. + estimator = tf.compat.v1.estimator.tpu.TPUEstimator( + use_tpu=FLAGS.use_tpu, + model_fn=model_fn, + config=run_config, + train_batch_size=FLAGS.train_batch_size, + eval_batch_size=FLAGS.eval_batch_size, + predict_batch_size=FLAGS.predict_batch_size) + + if FLAGS.do_train: + train_file = os.path.join(FLAGS.output_dir, "train.tf_record") + file_based_convert_examples_to_features( + train_examples, label_list, FLAGS.max_seq_length, tokenizer, train_file) + tf.compat.v1.logging.info("***** Running training *****") + tf.compat.v1.logging.info(" Num examples = %d", len(train_examples)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.train_batch_size) + tf.compat.v1.logging.info(" Num steps = %d", num_train_steps) + train_input_fn = file_based_input_fn_builder( + input_file=train_file, + seq_length=FLAGS.max_seq_length, + is_training=True, + drop_remainder=True) + estimator.train(input_fn=train_input_fn, max_steps=num_train_steps) + + if FLAGS.do_eval: + eval_examples = processor.get_dev_examples(FLAGS.data_dir) + num_actual_eval_examples = len(eval_examples) + if FLAGS.use_tpu: + # TPU requires a fixed batch size for all batches, therefore the number + # of examples must be a multiple of the batch size, or else examples + # will get dropped. So we pad with fake examples which are ignored + # later on. These do NOT count towards the metric (all tf.metrics + # support a per-instance weight, and these get a weight of 0.0). + while len(eval_examples) % FLAGS.eval_batch_size != 0: + eval_examples.append(PaddingInputExample()) + + eval_file = os.path.join(FLAGS.output_dir, "eval.tf_record") + file_based_convert_examples_to_features( + eval_examples, label_list, FLAGS.max_seq_length, tokenizer, eval_file) + + tf.compat.v1.logging.info("***** Running evaluation *****") + tf.compat.v1.logging.info(" Num examples = %d (%d actual, %d padding)", + len(eval_examples), num_actual_eval_examples, + len(eval_examples) - num_actual_eval_examples) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.eval_batch_size) + + # This tells the estimator to run through the entire set. + eval_steps = None + # However, if running eval on the TPU, you will need to specify the + # number of steps. + if FLAGS.use_tpu: + assert len(eval_examples) % FLAGS.eval_batch_size == 0 + eval_steps = int(len(eval_examples) // FLAGS.eval_batch_size) + + eval_drop_remainder = True if FLAGS.use_tpu else False + eval_input_fn = file_based_input_fn_builder( + input_file=eval_file, + seq_length=FLAGS.max_seq_length, + is_training=False, + drop_remainder=eval_drop_remainder) + + result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps) + + output_eval_file = os.path.join(FLAGS.output_dir, "eval_results.txt") + with tf.io.gfile.GFile(output_eval_file, "w") as writer: + tf.compat.v1.logging.info("***** Eval results *****") + for key in sorted(result.keys()): + tf.compat.v1.logging.info(" %s = %s", key, str(result[key])) + writer.write("%s = %s\n" % (key, str(result[key]))) + + if FLAGS.do_predict: + predict_examples = processor.get_test_examples(FLAGS.data_dir) + num_actual_predict_examples = len(predict_examples) + if FLAGS.use_tpu: + # TPU requires a fixed batch size for all batches, therefore the number + # of examples must be a multiple of the batch size, or else examples + # will get dropped. 
So we pad with fake examples which are ignored + # later on. + while len(predict_examples) % FLAGS.predict_batch_size != 0: + predict_examples.append(PaddingInputExample()) + + predict_file = os.path.join(FLAGS.output_dir, "predict.tf_record") + file_based_convert_examples_to_features(predict_examples, label_list, + FLAGS.max_seq_length, tokenizer, + predict_file) + + tf.compat.v1.logging.info("***** Running prediction*****") + tf.compat.v1.logging.info(" Num examples = %d (%d actual, %d padding)", + len(predict_examples), num_actual_predict_examples, + len(predict_examples) - num_actual_predict_examples) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.predict_batch_size) + + predict_drop_remainder = True if FLAGS.use_tpu else False + predict_input_fn = file_based_input_fn_builder( + input_file=predict_file, + seq_length=FLAGS.max_seq_length, + is_training=False, + drop_remainder=predict_drop_remainder) + + result = estimator.predict(input_fn=predict_input_fn) + + output_predict_file = os.path.join(FLAGS.output_dir, "test_results.tsv") + with tf.io.gfile.GFile(output_predict_file, "w") as writer: + num_written_lines = 0 + tf.compat.v1.logging.info("***** Predict results *****") + for (i, prediction) in enumerate(result): + probabilities = prediction["probabilities"] + if i >= num_actual_predict_examples: + break + output_line = "\t".join( + str(class_probability) + for class_probability in probabilities) + "\n" + writer.write(output_line) + num_written_lines += 1 + assert num_written_lines == num_actual_predict_examples + + +if __name__ == "__main__": + flags.mark_flag_as_required("data_dir") + flags.mark_flag_as_required("task_name") + flags.mark_flag_as_required("vocab_file") + flags.mark_flag_as_required("bert_config_file") + flags.mark_flag_as_required("output_dir") + tf.compat.v1.app.run() diff --git a/exp-4-bert-squad/run_classifier_with_tfhub.py b/exp-4-bert-squad/run_classifier_with_tfhub.py new file mode 100644 index 0000000..66290f5 --- /dev/null +++ b/exp-4-bert-squad/run_classifier_with_tfhub.py @@ -0,0 +1,314 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
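Note on the optimizer used throughout these runners: optimization.create_optimizer(total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu) is called in every model_fn above, but its body does not appear in this diff. Assuming the standard BERT implementation (linear warmup followed by a linear, power-1 polynomial decay to zero), the effective learning rate at a given global step is roughly:

def bert_learning_rate(step, init_lr, num_train_steps, num_warmup_steps):
    # Rough sketch of the schedule assumed to be implemented by
    # optimization.create_optimizer (linear warmup, then linear decay).
    if num_warmup_steps and step < num_warmup_steps:
        return init_lr * step / num_warmup_steps
    return init_lr * max(0.0, 1.0 - step / float(num_train_steps))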
+"""BERT finetuning runner with TF-Hub.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import optimization +import run_classifier +import tokenization +import tensorflow as tf +import tensorflow_hub as hub + +flags = tf.flags + +FLAGS = flags.FLAGS + +flags.DEFINE_string( + "bert_hub_module_handle", None, + "Handle for the BERT TF-Hub module.") + + +def create_model(is_training, input_ids, input_mask, segment_ids, labels, + num_labels, bert_hub_module_handle): + """Creates a classification model.""" + tags = set() + if is_training: + tags.add("train") + bert_module = hub.Module(bert_hub_module_handle, tags=tags, trainable=True) + bert_inputs = dict( + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids) + bert_outputs = bert_module( + inputs=bert_inputs, + signature="tokens", + as_dict=True) + + # In the demo, we are doing a simple classification task on the entire + # segment. + # + # If you want to use the token-level output, use + # bert_outputs["sequence_output"] instead. + output_layer = bert_outputs["pooled_output"] + + hidden_size = output_layer.shape[-1].value + + output_weights = tf.compat.v1.get_variable( + "output_weights", [num_labels, hidden_size], + initializer=tf.compat.v1.truncated_normal_initializer(stddev=0.02)) + + output_bias = tf.compat.v1.get_variable( + "output_bias", [num_labels], initializer=tf.compat.v1.zeros_initializer()) + + with tf.compat.v1.variable_scope("loss"): + if is_training: + # I.e., 0.1 dropout + output_layer = tf.nn.dropout(output_layer, rate=1 - (0.9)) + + logits = tf.matmul(output_layer, output_weights, transpose_b=True) + logits = tf.nn.bias_add(logits, output_bias) + probabilities = tf.nn.softmax(logits, axis=-1) + log_probs = tf.nn.log_softmax(logits, axis=-1) + + one_hot_labels = tf.one_hot(labels, depth=num_labels, dtype=tf.float32) + + per_example_loss = -tf.reduce_sum(input_tensor=one_hot_labels * log_probs, axis=-1) + loss = tf.reduce_mean(input_tensor=per_example_loss) + + return (loss, per_example_loss, logits, probabilities) + + +def model_fn_builder(num_labels, learning_rate, num_train_steps, + num_warmup_steps, use_tpu, bert_hub_module_handle): + """Returns `model_fn` closure for TPUEstimator.""" + + def model_fn(features, labels, mode, params): # pylint: disable=unused-argument + """The `model_fn` for TPUEstimator.""" + + tf.compat.v1.logging.info("*** Features ***") + for name in sorted(features.keys()): + tf.compat.v1.logging.info(" name = %s, shape = %s" % (name, features[name].shape)) + + input_ids = features["input_ids"] + input_mask = features["input_mask"] + segment_ids = features["segment_ids"] + label_ids = features["label_ids"] + + is_training = (mode == tf.estimator.ModeKeys.TRAIN) + + (total_loss, per_example_loss, logits, probabilities) = create_model( + is_training, input_ids, input_mask, segment_ids, label_ids, num_labels, + bert_hub_module_handle) + + output_spec = None + if mode == tf.estimator.ModeKeys.TRAIN: + train_op = optimization.create_optimizer( + total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu) + + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + train_op=train_op) + elif mode == tf.estimator.ModeKeys.EVAL: + + def metric_fn(per_example_loss, label_ids, logits): + predictions = tf.argmax(input=logits, axis=-1, output_type=tf.int32) + accuracy = tf.compat.v1.metrics.accuracy(label_ids, predictions) + loss = 
tf.compat.v1.metrics.mean(per_example_loss) + return { + "eval_accuracy": accuracy, + "eval_loss": loss, + } + + eval_metrics = (metric_fn, [per_example_loss, label_ids, logits]) + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + eval_metrics=eval_metrics) + elif mode == tf.estimator.ModeKeys.PREDICT: + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, predictions={"probabilities": probabilities}) + else: + raise ValueError( + "Only TRAIN, EVAL and PREDICT modes are supported: %s" % (mode)) + + return output_spec + + return model_fn + + +def create_tokenizer_from_hub_module(bert_hub_module_handle): + """Get the vocab file and casing info from the Hub module.""" + with tf.Graph().as_default(): + bert_module = hub.Module(bert_hub_module_handle) + tokenization_info = bert_module(signature="tokenization_info", as_dict=True) + with tf.compat.v1.Session() as sess: + vocab_file, do_lower_case = sess.run([tokenization_info["vocab_file"], + tokenization_info["do_lower_case"]]) + return tokenization.FullTokenizer( + vocab_file=vocab_file, do_lower_case=do_lower_case) + + +def main(_): + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + + processors = { + "cola": run_classifier.ColaProcessor, + "mnli": run_classifier.MnliProcessor, + "mrpc": run_classifier.MrpcProcessor, + } + + if not FLAGS.do_train and not FLAGS.do_eval: + raise ValueError("At least one of `do_train` or `do_eval` must be True.") + + tf.io.gfile.makedirs(FLAGS.output_dir) + + task_name = FLAGS.task_name.lower() + + if task_name not in processors: + raise ValueError("Task not found: %s" % (task_name)) + + processor = processors[task_name]() + + label_list = processor.get_labels() + + tokenizer = create_tokenizer_from_hub_module(FLAGS.bert_hub_module_handle) + + tpu_cluster_resolver = None + if FLAGS.use_tpu and FLAGS.tpu_name: + tpu_cluster_resolver = tf.distribute.cluster_resolver.TPUClusterResolver( + FLAGS.tpu_name, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project) + + is_per_host = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V2 + run_config = tf.compat.v1.estimator.tpu.RunConfig( + cluster=tpu_cluster_resolver, + master=FLAGS.master, + model_dir=FLAGS.output_dir, + save_checkpoints_steps=FLAGS.save_checkpoints_steps, + tpu_config=tf.compat.v1.estimator.tpu.TPUConfig( + iterations_per_loop=FLAGS.iterations_per_loop, + num_shards=FLAGS.num_tpu_cores, + per_host_input_for_training=is_per_host)) + + train_examples = None + num_train_steps = None + num_warmup_steps = None + if FLAGS.do_train: + train_examples = processor.get_train_examples(FLAGS.data_dir) + num_train_steps = int( + len(train_examples) / FLAGS.train_batch_size * FLAGS.num_train_epochs) + num_warmup_steps = int(num_train_steps * FLAGS.warmup_proportion) + + model_fn = model_fn_builder( + num_labels=len(label_list), + learning_rate=FLAGS.learning_rate, + num_train_steps=num_train_steps, + num_warmup_steps=num_warmup_steps, + use_tpu=FLAGS.use_tpu, + bert_hub_module_handle=FLAGS.bert_hub_module_handle) + + # If TPU is not available, this will fall back to normal Estimator on CPU + # or GPU. 
+ estimator = tf.compat.v1.estimator.tpu.TPUEstimator( + use_tpu=FLAGS.use_tpu, + model_fn=model_fn, + config=run_config, + train_batch_size=FLAGS.train_batch_size, + eval_batch_size=FLAGS.eval_batch_size, + predict_batch_size=FLAGS.predict_batch_size) + + if FLAGS.do_train: + train_features = run_classifier.convert_examples_to_features( + train_examples, label_list, FLAGS.max_seq_length, tokenizer) + tf.compat.v1.logging.info("***** Running training *****") + tf.compat.v1.logging.info(" Num examples = %d", len(train_examples)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.train_batch_size) + tf.compat.v1.logging.info(" Num steps = %d", num_train_steps) + train_input_fn = run_classifier.input_fn_builder( + features=train_features, + seq_length=FLAGS.max_seq_length, + is_training=True, + drop_remainder=True) + estimator.train(input_fn=train_input_fn, max_steps=num_train_steps) + + if FLAGS.do_eval: + eval_examples = processor.get_dev_examples(FLAGS.data_dir) + eval_features = run_classifier.convert_examples_to_features( + eval_examples, label_list, FLAGS.max_seq_length, tokenizer) + + tf.compat.v1.logging.info("***** Running evaluation *****") + tf.compat.v1.logging.info(" Num examples = %d", len(eval_examples)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.eval_batch_size) + + # This tells the estimator to run through the entire set. + eval_steps = None + # However, if running eval on the TPU, you will need to specify the + # number of steps. + if FLAGS.use_tpu: + # Eval will be slightly WRONG on the TPU because it will truncate + # the last batch. + eval_steps = int(len(eval_examples) / FLAGS.eval_batch_size) + + eval_drop_remainder = True if FLAGS.use_tpu else False + eval_input_fn = run_classifier.input_fn_builder( + features=eval_features, + seq_length=FLAGS.max_seq_length, + is_training=False, + drop_remainder=eval_drop_remainder) + + result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps) + + output_eval_file = os.path.join(FLAGS.output_dir, "eval_results.txt") + with tf.io.gfile.GFile(output_eval_file, "w") as writer: + tf.compat.v1.logging.info("***** Eval results *****") + for key in sorted(result.keys()): + tf.compat.v1.logging.info(" %s = %s", key, str(result[key])) + writer.write("%s = %s\n" % (key, str(result[key]))) + + if FLAGS.do_predict: + predict_examples = processor.get_test_examples(FLAGS.data_dir) + if FLAGS.use_tpu: + # Discard batch remainder if running on TPU + n = len(predict_examples) + predict_examples = predict_examples[:(n - n % FLAGS.predict_batch_size)] + + predict_file = os.path.join(FLAGS.output_dir, "predict.tf_record") + run_classifier.file_based_convert_examples_to_features( + predict_examples, label_list, FLAGS.max_seq_length, tokenizer, + predict_file) + + tf.compat.v1.logging.info("***** Running prediction*****") + tf.compat.v1.logging.info(" Num examples = %d", len(predict_examples)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.predict_batch_size) + + predict_input_fn = run_classifier.file_based_input_fn_builder( + input_file=predict_file, + seq_length=FLAGS.max_seq_length, + is_training=False, + drop_remainder=FLAGS.use_tpu) + + result = estimator.predict(input_fn=predict_input_fn) + + output_predict_file = os.path.join(FLAGS.output_dir, "test_results.tsv") + with tf.io.gfile.GFile(output_predict_file, "w") as writer: + tf.compat.v1.logging.info("***** Predict results *****") + for prediction in result: + probabilities = prediction["probabilities"] + output_line = "\t".join( + str(class_probability) + for 
class_probability in probabilities) + "\n" + writer.write(output_line) + + +if __name__ == "__main__": + flags.mark_flag_as_required("data_dir") + flags.mark_flag_as_required("task_name") + flags.mark_flag_as_required("bert_hub_module_handle") + flags.mark_flag_as_required("output_dir") + tf.compat.v1.app.run() diff --git a/exp-4-bert-squad/run_pretraining.py b/exp-4-bert-squad/run_pretraining.py new file mode 100644 index 0000000..357cffb --- /dev/null +++ b/exp-4-bert-squad/run_pretraining.py @@ -0,0 +1,493 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Run masked LM/next sentence masked_lm pre-training for BERT.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import modeling +import optimization +import tensorflow as tf + +flags = tf.flags + +FLAGS = flags.FLAGS + +## Required parameters +flags.DEFINE_string( + "bert_config_file", None, + "The config json file corresponding to the pre-trained BERT model. " + "This specifies the model architecture.") + +flags.DEFINE_string( + "input_file", None, + "Input TF example files (can be a glob or comma separated).") + +flags.DEFINE_string( + "output_dir", None, + "The output directory where the model checkpoints will be written.") + +## Other parameters +flags.DEFINE_string( + "init_checkpoint", None, + "Initial checkpoint (usually from a pre-trained BERT model).") + +flags.DEFINE_integer( + "max_seq_length", 128, + "The maximum total input sequence length after WordPiece tokenization. " + "Sequences longer than this will be truncated, and sequences shorter " + "than this will be padded. Must match data generation.") + +flags.DEFINE_integer( + "max_predictions_per_seq", 20, + "Maximum number of masked LM predictions per sequence. " + "Must match data generation.") + +flags.DEFINE_bool("do_train", False, "Whether to run training.") + +flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.") + +flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.") + +flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.") + +flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.") + +flags.DEFINE_integer("num_train_steps", 100000, "Number of training steps.") + +flags.DEFINE_integer("num_warmup_steps", 10000, "Number of warmup steps.") + +flags.DEFINE_integer("save_checkpoints_steps", 1000, + "How often to save the model checkpoint.") + +flags.DEFINE_integer("iterations_per_loop", 1000, + "How many steps to make in each estimator call.") + +flags.DEFINE_integer("max_eval_steps", 100, "Maximum number of eval steps.") + +flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.") + +tf.flags.DEFINE_string( + "tpu_name", None, + "The Cloud TPU to use for training. 
This should be either the name " + "used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 " + "url.") + +tf.flags.DEFINE_string( + "tpu_zone", None, + "[Optional] GCE zone where the Cloud TPU is located in. If not " + "specified, we will attempt to automatically detect the GCE project from " + "metadata.") + +tf.flags.DEFINE_string( + "gcp_project", None, + "[Optional] Project name for the Cloud TPU-enabled project. If not " + "specified, we will attempt to automatically detect the GCE project from " + "metadata.") + +tf.flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.") + +flags.DEFINE_integer( + "num_tpu_cores", 8, + "Only used if `use_tpu` is True. Total number of TPU cores to use.") + + +def model_fn_builder(bert_config, init_checkpoint, learning_rate, + num_train_steps, num_warmup_steps, use_tpu, + use_one_hot_embeddings): + """Returns `model_fn` closure for TPUEstimator.""" + + def model_fn(features, labels, mode, params): # pylint: disable=unused-argument + """The `model_fn` for TPUEstimator.""" + + tf.compat.v1.logging.info("*** Features ***") + for name in sorted(features.keys()): + tf.compat.v1.logging.info(" name = %s, shape = %s" % (name, features[name].shape)) + + input_ids = features["input_ids"] + input_mask = features["input_mask"] + segment_ids = features["segment_ids"] + masked_lm_positions = features["masked_lm_positions"] + masked_lm_ids = features["masked_lm_ids"] + masked_lm_weights = features["masked_lm_weights"] + next_sentence_labels = features["next_sentence_labels"] + + is_training = (mode == tf.estimator.ModeKeys.TRAIN) + + model = modeling.BertModel( + config=bert_config, + is_training=is_training, + input_ids=input_ids, + input_mask=input_mask, + token_type_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings) + + (masked_lm_loss, + masked_lm_example_loss, masked_lm_log_probs) = get_masked_lm_output( + bert_config, model.get_sequence_output(), model.get_embedding_table(), + masked_lm_positions, masked_lm_ids, masked_lm_weights) + + (next_sentence_loss, next_sentence_example_loss, + next_sentence_log_probs) = get_next_sentence_output( + bert_config, model.get_pooled_output(), next_sentence_labels) + + total_loss = masked_lm_loss + next_sentence_loss + + tvars = tf.compat.v1.trainable_variables() + + initialized_variable_names = {} + scaffold_fn = None + if init_checkpoint: + (assignment_map, initialized_variable_names + ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) + if use_tpu: + + def tpu_scaffold(): + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + return tf.compat.v1.train.Scaffold() + + scaffold_fn = tpu_scaffold + else: + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + + tf.compat.v1.logging.info("**** Trainable Variables ****") + for var in tvars: + init_string = "" + if var.name in initialized_variable_names: + init_string = ", *INIT_FROM_CKPT*" + tf.compat.v1.logging.info(" name = %s, shape = %s%s", var.name, var.shape, + init_string) + + output_spec = None + if mode == tf.estimator.ModeKeys.TRAIN: + train_op = optimization.create_optimizer( + total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu) + + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + train_op=train_op, + scaffold_fn=scaffold_fn) + elif mode == tf.estimator.ModeKeys.EVAL: + + def metric_fn(masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids, + masked_lm_weights, next_sentence_example_loss, + 
next_sentence_log_probs, next_sentence_labels): + """Computes the loss and accuracy of the model.""" + masked_lm_log_probs = tf.reshape(masked_lm_log_probs, + [-1, masked_lm_log_probs.shape[-1]]) + masked_lm_predictions = tf.argmax( + input=masked_lm_log_probs, axis=-1, output_type=tf.int32) + masked_lm_example_loss = tf.reshape(masked_lm_example_loss, [-1]) + masked_lm_ids = tf.reshape(masked_lm_ids, [-1]) + masked_lm_weights = tf.reshape(masked_lm_weights, [-1]) + masked_lm_accuracy = tf.compat.v1.metrics.accuracy( + labels=masked_lm_ids, + predictions=masked_lm_predictions, + weights=masked_lm_weights) + masked_lm_mean_loss = tf.compat.v1.metrics.mean( + values=masked_lm_example_loss, weights=masked_lm_weights) + + next_sentence_log_probs = tf.reshape( + next_sentence_log_probs, [-1, next_sentence_log_probs.shape[-1]]) + next_sentence_predictions = tf.argmax( + input=next_sentence_log_probs, axis=-1, output_type=tf.int32) + next_sentence_labels = tf.reshape(next_sentence_labels, [-1]) + next_sentence_accuracy = tf.compat.v1.metrics.accuracy( + labels=next_sentence_labels, predictions=next_sentence_predictions) + next_sentence_mean_loss = tf.compat.v1.metrics.mean( + values=next_sentence_example_loss) + + return { + "masked_lm_accuracy": masked_lm_accuracy, + "masked_lm_loss": masked_lm_mean_loss, + "next_sentence_accuracy": next_sentence_accuracy, + "next_sentence_loss": next_sentence_mean_loss, + } + + eval_metrics = (metric_fn, [ + masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids, + masked_lm_weights, next_sentence_example_loss, + next_sentence_log_probs, next_sentence_labels + ]) + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + eval_metrics=eval_metrics, + scaffold_fn=scaffold_fn) + else: + raise ValueError("Only TRAIN and EVAL modes are supported: %s" % (mode)) + + return output_spec + + return model_fn + + +def get_masked_lm_output(bert_config, input_tensor, output_weights, positions, + label_ids, label_weights): + """Get loss and log probs for the masked LM.""" + input_tensor = gather_indexes(input_tensor, positions) + + with tf.compat.v1.variable_scope("cls/predictions"): + # We apply one more non-linear transformation before the output layer. + # This matrix is not used after pre-training. + with tf.compat.v1.variable_scope("transform"): + input_tensor = tf.compat.v1.layers.dense( + input_tensor, + units=bert_config.hidden_size, + activation=modeling.get_activation(bert_config.hidden_act), + kernel_initializer=modeling.create_initializer( + bert_config.initializer_range)) + input_tensor = modeling.layer_norm(input_tensor) + + # The output weights are the same as the input embeddings, but there is + # an output-only bias for each token. + output_bias = tf.compat.v1.get_variable( + "output_bias", + shape=[bert_config.vocab_size], + initializer=tf.compat.v1.zeros_initializer()) + logits = tf.matmul(input_tensor, output_weights, transpose_b=True) + logits = tf.nn.bias_add(logits, output_bias) + log_probs = tf.nn.log_softmax(logits, axis=-1) + + label_ids = tf.reshape(label_ids, [-1]) + label_weights = tf.reshape(label_weights, [-1]) + + one_hot_labels = tf.one_hot( + label_ids, depth=bert_config.vocab_size, dtype=tf.float32) + + # The `positions` tensor might be zero-padded (if the sequence is too + # short to have the maximum number of predictions). The `label_weights` + # tensor has a value of 1.0 for every real prediction and 0.0 for the + # padding predictions. 
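+    # Illustration (added note, not in the original source): with
+    # max_predictions_per_seq = 20 but only 3 real masked positions in a
+    # sequence, label_weights is [1, 1, 1, 0, ..., 0]; the loss below is then
+    # averaged over the 3 real predictions only, and the small 1e-5 term keeps
+    # the denominator non-zero when a sequence has no masked positions.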
+ per_example_loss = -tf.reduce_sum(input_tensor=log_probs * one_hot_labels, axis=[-1]) + numerator = tf.reduce_sum(input_tensor=label_weights * per_example_loss) + denominator = tf.reduce_sum(input_tensor=label_weights) + 1e-5 + loss = numerator / denominator + + return (loss, per_example_loss, log_probs) + + +def get_next_sentence_output(bert_config, input_tensor, labels): + """Get loss and log probs for the next sentence prediction.""" + + # Simple binary classification. Note that 0 is "next sentence" and 1 is + # "random sentence". This weight matrix is not used after pre-training. + with tf.compat.v1.variable_scope("cls/seq_relationship"): + output_weights = tf.compat.v1.get_variable( + "output_weights", + shape=[2, bert_config.hidden_size], + initializer=modeling.create_initializer(bert_config.initializer_range)) + output_bias = tf.compat.v1.get_variable( + "output_bias", shape=[2], initializer=tf.compat.v1.zeros_initializer()) + + logits = tf.matmul(input_tensor, output_weights, transpose_b=True) + logits = tf.nn.bias_add(logits, output_bias) + log_probs = tf.nn.log_softmax(logits, axis=-1) + labels = tf.reshape(labels, [-1]) + one_hot_labels = tf.one_hot(labels, depth=2, dtype=tf.float32) + per_example_loss = -tf.reduce_sum(input_tensor=one_hot_labels * log_probs, axis=-1) + loss = tf.reduce_mean(input_tensor=per_example_loss) + return (loss, per_example_loss, log_probs) + + +def gather_indexes(sequence_tensor, positions): + """Gathers the vectors at the specific positions over a minibatch.""" + sequence_shape = modeling.get_shape_list(sequence_tensor, expected_rank=3) + batch_size = sequence_shape[0] + seq_length = sequence_shape[1] + width = sequence_shape[2] + + flat_offsets = tf.reshape( + tf.range(0, batch_size, dtype=tf.int32) * seq_length, [-1, 1]) + flat_positions = tf.reshape(positions + flat_offsets, [-1]) + flat_sequence_tensor = tf.reshape(sequence_tensor, + [batch_size * seq_length, width]) + output_tensor = tf.gather(flat_sequence_tensor, flat_positions) + return output_tensor + + +def input_fn_builder(input_files, + max_seq_length, + max_predictions_per_seq, + is_training, + num_cpu_threads=4): + """Creates an `input_fn` closure to be passed to TPUEstimator.""" + + def input_fn(params): + """The actual input function.""" + batch_size = params["batch_size"] + + name_to_features = { + "input_ids": + tf.io.FixedLenFeature([max_seq_length], tf.int64), + "input_mask": + tf.io.FixedLenFeature([max_seq_length], tf.int64), + "segment_ids": + tf.io.FixedLenFeature([max_seq_length], tf.int64), + "masked_lm_positions": + tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64), + "masked_lm_ids": + tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64), + "masked_lm_weights": + tf.io.FixedLenFeature([max_predictions_per_seq], tf.float32), + "next_sentence_labels": + tf.io.FixedLenFeature([1], tf.int64), + } + + # For training, we want a lot of parallel reading and shuffling. + # For eval, we want no shuffling and parallel reading doesn't matter. + if is_training: + d = tf.data.Dataset.from_tensor_slices(tf.constant(input_files)) + d = d.repeat() + d = d.shuffle(buffer_size=len(input_files)) + + # `cycle_length` is the number of parallel files that get read. + cycle_length = min(num_cpu_threads, len(input_files)) + + # `sloppy` mode means that the interleaving is not exact. This adds + # even more randomness to the training pipeline. 
+ d = d.apply( + tf.data.experimental.parallel_interleave( + tf.data.TFRecordDataset, + sloppy=is_training, + cycle_length=cycle_length)) + d = d.shuffle(buffer_size=100) + else: + d = tf.data.TFRecordDataset(input_files) + # Since we evaluate for a fixed number of steps we don't want to encounter + # out-of-range exceptions. + d = d.repeat() + + # We must `drop_remainder` on training because the TPU requires fixed + # size dimensions. For eval, we assume we are evaluating on the CPU or GPU + # and we *don't* want to drop the remainder, otherwise we wont cover + # every sample. + d = d.apply( + tf.data.experimental.map_and_batch( + lambda record: _decode_record(record, name_to_features), + batch_size=batch_size, + num_parallel_batches=num_cpu_threads, + drop_remainder=True)) + return d + + return input_fn + + +def _decode_record(record, name_to_features): + """Decodes a record to a TensorFlow example.""" + example = tf.io.parse_single_example(serialized=record, features=name_to_features) + + # tf.Example only supports tf.int64, but the TPU only supports tf.int32. + # So cast all int64 to int32. + for name in list(example.keys()): + t = example[name] + if t.dtype == tf.int64: + t = tf.cast(t, dtype=tf.int32) + example[name] = t + + return example + + +def main(_): + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + + if not FLAGS.do_train and not FLAGS.do_eval: + raise ValueError("At least one of `do_train` or `do_eval` must be True.") + + bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file) + + tf.io.gfile.makedirs(FLAGS.output_dir) + + input_files = [] + for input_pattern in FLAGS.input_file.split(","): + input_files.extend(tf.io.gfile.glob(input_pattern)) + + tf.compat.v1.logging.info("*** Input Files ***") + for input_file in input_files: + tf.compat.v1.logging.info(" %s" % input_file) + + tpu_cluster_resolver = None + if FLAGS.use_tpu and FLAGS.tpu_name: + tpu_cluster_resolver = tf.distribute.cluster_resolver.TPUClusterResolver( + FLAGS.tpu_name, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project) + + is_per_host = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V2 + run_config = tf.compat.v1.estimator.tpu.RunConfig( + cluster=tpu_cluster_resolver, + master=FLAGS.master, + model_dir=FLAGS.output_dir, + save_checkpoints_steps=FLAGS.save_checkpoints_steps, + tpu_config=tf.compat.v1.estimator.tpu.TPUConfig( + iterations_per_loop=FLAGS.iterations_per_loop, + num_shards=FLAGS.num_tpu_cores, + per_host_input_for_training=is_per_host)) + + model_fn = model_fn_builder( + bert_config=bert_config, + init_checkpoint=FLAGS.init_checkpoint, + learning_rate=FLAGS.learning_rate, + num_train_steps=FLAGS.num_train_steps, + num_warmup_steps=FLAGS.num_warmup_steps, + use_tpu=FLAGS.use_tpu, + use_one_hot_embeddings=FLAGS.use_tpu) + + # If TPU is not available, this will fall back to normal Estimator on CPU + # or GPU. 
+ estimator = tf.compat.v1.estimator.tpu.TPUEstimator( + use_tpu=FLAGS.use_tpu, + model_fn=model_fn, + config=run_config, + train_batch_size=FLAGS.train_batch_size, + eval_batch_size=FLAGS.eval_batch_size) + + if FLAGS.do_train: + tf.compat.v1.logging.info("***** Running training *****") + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.train_batch_size) + train_input_fn = input_fn_builder( + input_files=input_files, + max_seq_length=FLAGS.max_seq_length, + max_predictions_per_seq=FLAGS.max_predictions_per_seq, + is_training=True) + estimator.train(input_fn=train_input_fn, max_steps=FLAGS.num_train_steps) + + if FLAGS.do_eval: + tf.compat.v1.logging.info("***** Running evaluation *****") + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.eval_batch_size) + + eval_input_fn = input_fn_builder( + input_files=input_files, + max_seq_length=FLAGS.max_seq_length, + max_predictions_per_seq=FLAGS.max_predictions_per_seq, + is_training=False) + + result = estimator.evaluate( + input_fn=eval_input_fn, steps=FLAGS.max_eval_steps) + + output_eval_file = os.path.join(FLAGS.output_dir, "eval_results.txt") + with tf.io.gfile.GFile(output_eval_file, "w") as writer: + tf.compat.v1.logging.info("***** Eval results *****") + for key in sorted(result.keys()): + tf.compat.v1.logging.info(" %s = %s", key, str(result[key])) + writer.write("%s = %s\n" % (key, str(result[key]))) + + +if __name__ == "__main__": + flags.mark_flag_as_required("input_file") + flags.mark_flag_as_required("bert_config_file") + flags.mark_flag_as_required("output_dir") + tf.compat.v1.app.run() diff --git a/exp-4-bert-squad/run_squad.py b/exp-4-bert-squad/run_squad.py new file mode 100644 index 0000000..f70c00e --- /dev/null +++ b/exp-4-bert-squad/run_squad.py @@ -0,0 +1,1503 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Run BERT on SQuAD 1.1 and SQuAD 2.0.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import json +import math +import os +import random +import modeling +import optimization +import tokenization +import six +import tensorflow as tf +import tf_slim as slim + +# from codecarbon import EmissionsTracker +# from carbontracker.tracker import CarbonTracker +# from carbontracker import parser as CTparser +# import eco2ai +# from experiment_impact_tracker.compute_tracker import ImpactTracker + +# --- FOR CALCULATORS +import time +import sys +_path = '.' 
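+# The current directory is appended to sys.path so the local fct_for_* helper
+# modules imported below can be resolved.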
+sys.path.append(os.path.join(_path)) +from tqdm import tqdm +from fct_for_saving import save_cc +from fct_for_saving import save_ct +from fct_for_saving import save_eco2ai +from fct_for_saving import save_ES +from fct_for_saving import save_FLOPS +from fct_for_saving import save_ga +from fct_for_saving import save_nocalc +from fct_for_saving import save_tapo +from fct_for_tapo import stop_TAPO +import psutil +import GPUtil +from fct_for_ga import stop_UTIL, mean_parallel_UTIL +from fct_for_experiments import ExpParams +from fct_for_experiments import prepare_calculator +from fct_for_experiments import start_calculators +from fct_for_experiments import stop_calculators +from fct_for_experiments import flops_method_tensorflow +# --------------------- + + +flags = tf.compat.v1.flags + +FLAGS = flags.FLAGS + +## Required parameters +flags.DEFINE_string( + "bert_config_file", None, + "The config json file corresponding to the pre-trained BERT model. " + "This specifies the model architecture.") + +flags.DEFINE_string("vocab_file", None, + "The vocabulary file that the BERT model was trained on.") + +flags.DEFINE_string( + "output_dir", None, + "The output directory where the model checkpoints will be written.") + +## Other parameters + +# flags.DEFINE_string("calculator", None, +# "calculator to meassure energy usage (Use carbon_tracker or code_carbon or eco2ai or impact_tracker)") + +flags.DEFINE_string("train_file", None, + "SQuAD json for training. E.g., train-v1.1.json") + +flags.DEFINE_string( + "predict_file", None, + "SQuAD json for predictions. E.g., dev-v1.1.json or test-v1.1.json") + +flags.DEFINE_string( + "init_checkpoint", None, + "Initial checkpoint (usually from a pre-trained BERT model).") + +flags.DEFINE_bool( + "do_lower_case", True, + "Whether to lower case the input text. Should be True for uncased " + "models and False for cased models.") + +flags.DEFINE_integer( + "max_seq_length", 384, + "The maximum total input sequence length after WordPiece tokenization. " + "Sequences longer than this will be truncated, and sequences shorter " + "than this will be padded.") + +flags.DEFINE_integer( + "doc_stride", 128, + "When splitting up a long document into chunks, how much stride to " + "take between chunks.") + +flags.DEFINE_integer( + "max_query_length", 64, + "The maximum number of tokens for the question. Questions longer than " + "this will be truncated to this length.") + +flags.DEFINE_bool( + "do_train", False, + "Whether to run training.") + +flags.DEFINE_bool( + "do_predict", False, + "Whether to run eval on the dev set.") + +flags.DEFINE_integer( + "train_batch_size", 32, + "Total batch size for training.") + +flags.DEFINE_integer( + "predict_batch_size", 8, + "Total batch size for predictions.") + +flags.DEFINE_float( + "learning_rate", 5e-5, + "The initial learning rate for Adam.") + +flags.DEFINE_float( + "num_train_epochs", 3.0, + "Total number of training epochs to perform.") + +flags.DEFINE_float( + "warmup_proportion", 0.1, + "Proportion of training to perform linear learning rate warmup for. 
" + "E.g., 0.1 = 10% of training.") + +flags.DEFINE_integer("save_checkpoints_steps", 1000, + "How often to save the model checkpoint.") + +flags.DEFINE_integer("iterations_per_loop", 1000, + "How many steps to make in each estimator call.") + +flags.DEFINE_integer( + "n_best_size", 20, + "The total number of n-best predictions to generate in the " + "nbest_predictions.json output file.") + +flags.DEFINE_integer( + "max_answer_length", 30, + "The maximum length of an answer that can be generated. This is needed " + "because the start and end predictions are not conditioned on one another.") + +flags.DEFINE_bool( + "use_tpu", False, + "Whether to use TPU or GPU/CPU.") + +tf.compat.v1.flags.DEFINE_string( + "tpu_name", None, + "The Cloud TPU to use for training. This should be either the name " + "used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 " + "url.") + +tf.compat.v1.flags.DEFINE_string( + "tpu_zone", None, + "[Optional] GCE zone where the Cloud TPU is located in. If not " + "specified, we will attempt to automatically detect the GCE project from " + "metadata.") + +tf.compat.v1.flags.DEFINE_string( + "gcp_project", None, + "[Optional] Project name for the Cloud TPU-enabled project. If not " + "specified, we will attempt to automatically detect the GCE project from " + "metadata.") + +tf.compat.v1.flags.DEFINE_string( + "master", None, + "[Optional] TensorFlow master URL.") + +flags.DEFINE_integer( + "num_tpu_cores", 8, + "Only used if `use_tpu` is True. Total number of TPU cores to use.") + +flags.DEFINE_bool( + "verbose_logging", False, + "If true, all of the warnings related to data processing will be printed. " + "A number of warnings are expected for a normal SQuAD evaluation.") + +flags.DEFINE_bool( + "version_2_with_negative", False, + "If true, the SQuAD examples contain some that do not have an answer.") + +flags.DEFINE_float( + "null_score_diff_threshold", 0.0, + "If null_score - best_non_null is greater than the threshold predict null.") + +# --- FOR CALCULATORS +flags.DEFINE_string( + "calculator", "no_calculator", + "Calculator used to meassure energy usage." + ) +flags.DEFINE_string( + "calculator_mode", "", + "Specify the mode for some calculators ." + ) +flags.DEFINE_integer( + "nb_batch_inferences", 1, + "Number of inferences we track with the calculator" + ) +flags.DEFINE_string( + "name_exp", "SQUAD-extracted", + "Name of the ML experiment we are evaluating." + ) +flags.DEFINE_string( + "computer", "linux_alienware", + "Computer on which we are running the experiment." + ) +flags.DEFINE_string( + "path_logs_and_results", ".", + "Folder in which we store logs and calculators' outputs." + ) +flags.DEFINE_string( + "use_accelerator", "True", + "Use gpu or not." + ) +# ------------------- + + + + + + +class SquadExample(object): + """A single training/test example for simple sequence classification. + + For examples without an answer, the start and end position are -1. 
+ """ + + def __init__(self, + qas_id, + question_text, + doc_tokens, + orig_answer_text=None, + start_position=None, + end_position=None, + is_impossible=False): + self.qas_id = qas_id + self.question_text = question_text + self.doc_tokens = doc_tokens + self.orig_answer_text = orig_answer_text + self.start_position = start_position + self.end_position = end_position + self.is_impossible = is_impossible + + def __str__(self): + return self.__repr__() + + def __repr__(self): + s = "" + s += "qas_id: %s" % (tokenization.printable_text(self.qas_id)) + s += ", question_text: %s" % ( + tokenization.printable_text(self.question_text)) + s += ", doc_tokens: [%s]" % (" ".join(self.doc_tokens)) + if self.start_position: + s += ", start_position: %d" % (self.start_position) + if self.start_position: + s += ", end_position: %d" % (self.end_position) + if self.start_position: + s += ", is_impossible: %r" % (self.is_impossible) + return s + + +class InputFeatures(object): + """A single set of features of data.""" + + def __init__(self, + unique_id, + example_index, + doc_span_index, + tokens, + token_to_orig_map, + token_is_max_context, + input_ids, + input_mask, + segment_ids, + start_position=None, + end_position=None, + is_impossible=None): + self.unique_id = unique_id + self.example_index = example_index + self.doc_span_index = doc_span_index + self.tokens = tokens + self.token_to_orig_map = token_to_orig_map + self.token_is_max_context = token_is_max_context + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.start_position = start_position + self.end_position = end_position + self.is_impossible = is_impossible + + +def read_squad_examples(input_file, is_training): + """Read a SQuAD json file into a list of SquadExample.""" + with tf.io.gfile.GFile(input_file, "r") as reader: + input_data = json.load(reader)["data"] + + def is_whitespace(c): + if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F: + return True + return False + + examples = [] + for entry in input_data: + for paragraph in entry["paragraphs"]: + paragraph_text = paragraph["context"] + doc_tokens = [] + char_to_word_offset = [] + prev_is_whitespace = True + for c in paragraph_text: + if is_whitespace(c): + prev_is_whitespace = True + else: + if prev_is_whitespace: + doc_tokens.append(c) + else: + doc_tokens[-1] += c + prev_is_whitespace = False + char_to_word_offset.append(len(doc_tokens) - 1) + + for qa in paragraph["qas"]: + qas_id = qa["id"] + question_text = qa["question"] + start_position = None + end_position = None + orig_answer_text = None + is_impossible = False + if is_training: + + if FLAGS.version_2_with_negative: + is_impossible = qa["is_impossible"] + if (len(qa["answers"]) != 1) and (not is_impossible): + raise ValueError( + "For training, each question should have exactly 1 answer.") + if not is_impossible: + answer = qa["answers"][0] + orig_answer_text = answer["text"] + answer_offset = answer["answer_start"] + answer_length = len(orig_answer_text) + start_position = char_to_word_offset[answer_offset] + end_position = char_to_word_offset[answer_offset + answer_length - + 1] + # Only add answers where the text can be exactly recovered from the + # document. If this CAN'T happen it's likely due to weird Unicode + # stuff so we will just skip the example. + # + # Note that this means for training mode, every example is NOT + # guaranteed to be preserved. 
+ actual_text = " ".join( + doc_tokens[start_position:(end_position + 1)]) + cleaned_answer_text = " ".join( + tokenization.whitespace_tokenize(orig_answer_text)) + if actual_text.find(cleaned_answer_text) == -1: + tf.compat.v1.logging.warning("Could not find answer: '%s' vs. '%s'", + actual_text, cleaned_answer_text) + continue + else: + start_position = -1 + end_position = -1 + orig_answer_text = "" + + example = SquadExample( + qas_id=qas_id, + question_text=question_text, + doc_tokens=doc_tokens, + orig_answer_text=orig_answer_text, + start_position=start_position, + end_position=end_position, + is_impossible=is_impossible) + examples.append(example) + + return examples + + +def convert_examples_to_features(examples, tokenizer, max_seq_length, + doc_stride, max_query_length, is_training, + output_fn): + """Loads a data file into a list of `InputBatch`s.""" + + unique_id = 1000000000 + + for (example_index, example) in enumerate(examples): + query_tokens = tokenizer.tokenize(example.question_text) + + if len(query_tokens) > max_query_length: + query_tokens = query_tokens[0:max_query_length] + + tok_to_orig_index = [] + orig_to_tok_index = [] + all_doc_tokens = [] + for (i, token) in enumerate(example.doc_tokens): + orig_to_tok_index.append(len(all_doc_tokens)) + sub_tokens = tokenizer.tokenize(token) + for sub_token in sub_tokens: + tok_to_orig_index.append(i) + all_doc_tokens.append(sub_token) + + tok_start_position = None + tok_end_position = None + if is_training and example.is_impossible: + tok_start_position = -1 + tok_end_position = -1 + if is_training and not example.is_impossible: + tok_start_position = orig_to_tok_index[example.start_position] + if example.end_position < len(example.doc_tokens) - 1: + tok_end_position = orig_to_tok_index[example.end_position + 1] - 1 + else: + tok_end_position = len(all_doc_tokens) - 1 + (tok_start_position, tok_end_position) = _improve_answer_span( + all_doc_tokens, tok_start_position, tok_end_position, tokenizer, + example.orig_answer_text) + + # The -3 accounts for [CLS], [SEP] and [SEP] + max_tokens_for_doc = max_seq_length - len(query_tokens) - 3 + + # We can have documents that are longer than the maximum sequence length. + # To deal with this we do a sliding window approach, where we take chunks + # of the up to our max length with a stride of `doc_stride`. 
+ _DocSpan = collections.namedtuple( # pylint: disable=invalid-name + "DocSpan", ["start", "length"]) + doc_spans = [] + start_offset = 0 + while start_offset < len(all_doc_tokens): + length = len(all_doc_tokens) - start_offset + if length > max_tokens_for_doc: + length = max_tokens_for_doc + doc_spans.append(_DocSpan(start=start_offset, length=length)) + if start_offset + length == len(all_doc_tokens): + break + start_offset += min(length, doc_stride) + + for (doc_span_index, doc_span) in enumerate(doc_spans): + tokens = [] + token_to_orig_map = {} + token_is_max_context = {} + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in query_tokens: + tokens.append(token) + segment_ids.append(0) + tokens.append("[SEP]") + segment_ids.append(0) + + for i in range(doc_span.length): + split_token_index = doc_span.start + i + token_to_orig_map[len(tokens)] = tok_to_orig_index[split_token_index] + + is_max_context = _check_is_max_context(doc_spans, doc_span_index, + split_token_index) + token_is_max_context[len(tokens)] = is_max_context + tokens.append(all_doc_tokens[split_token_index]) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + + # The mask has 1 for real tokens and 0 for padding tokens. Only real + # tokens are attended to. + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. + while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + + start_position = None + end_position = None + if is_training and not example.is_impossible: + # For training, if our document chunk does not contain an annotation + # we throw it out, since there is nothing to predict. 
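+        # "Thrown out" here means the span keeps positions (0, 0), i.e. it
+        # points at the [CLS] token; the feature itself is still emitted.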
+ doc_start = doc_span.start + doc_end = doc_span.start + doc_span.length - 1 + out_of_span = False + if not (tok_start_position >= doc_start and + tok_end_position <= doc_end): + out_of_span = True + if out_of_span: + start_position = 0 + end_position = 0 + else: + doc_offset = len(query_tokens) + 2 + start_position = tok_start_position - doc_start + doc_offset + end_position = tok_end_position - doc_start + doc_offset + + if is_training and example.is_impossible: + start_position = 0 + end_position = 0 + + if example_index < 20: + tf.compat.v1.logging.info("*** Example ***") + tf.compat.v1.logging.info("unique_id: %s" % (unique_id)) + tf.compat.v1.logging.info("example_index: %s" % (example_index)) + tf.compat.v1.logging.info("doc_span_index: %s" % (doc_span_index)) + tf.compat.v1.logging.info("tokens: %s" % " ".join( + [tokenization.printable_text(x) for x in tokens])) + tf.compat.v1.logging.info("token_to_orig_map: %s" % " ".join( + ["%d:%d" % (x, y) for (x, y) in six.iteritems(token_to_orig_map)])) + tf.compat.v1.logging.info("token_is_max_context: %s" % " ".join([ + "%d:%s" % (x, y) for (x, y) in six.iteritems(token_is_max_context) + ])) + tf.compat.v1.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + tf.compat.v1.logging.info( + "input_mask: %s" % " ".join([str(x) for x in input_mask])) + tf.compat.v1.logging.info( + "segment_ids: %s" % " ".join([str(x) for x in segment_ids])) + if is_training and example.is_impossible: + tf.compat.v1.logging.info("impossible example") + if is_training and not example.is_impossible: + answer_text = " ".join(tokens[start_position:(end_position + 1)]) + tf.compat.v1.logging.info("start_position: %d" % (start_position)) + tf.compat.v1.logging.info("end_position: %d" % (end_position)) + tf.compat.v1.logging.info( + "answer: %s" % (tokenization.printable_text(answer_text))) + + feature = InputFeatures( + unique_id=unique_id, + example_index=example_index, + doc_span_index=doc_span_index, + tokens=tokens, + token_to_orig_map=token_to_orig_map, + token_is_max_context=token_is_max_context, + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + start_position=start_position, + end_position=end_position, + is_impossible=example.is_impossible) + + # Run callback + output_fn(feature) + + unique_id += 1 + + +def _improve_answer_span(doc_tokens, input_start, input_end, tokenizer, + orig_answer_text): + """Returns tokenized answer spans that better match the annotated answer.""" + + # The SQuAD annotations are character based. We first project them to + # whitespace-tokenized words. But then after WordPiece tokenization, we can + # often find a "better match". For example: + # + # Question: What year was John Smith born? + # Context: The leader was John Smith (1895-1943). + # Answer: 1895 + # + # The original whitespace-tokenized answer will be "(1895-1943).". However + # after tokenization, our tokens will be "( 1895 - 1943 ) .". So we can match + # the exact answer, 1895. + # + # However, this is not always possible. Consider the following: + # + # Question: What country is the top exporter of electornics? + # Context: The Japanese electronics industry is the lagest in the world. + # Answer: Japan + # + # In this case, the annotator chose "Japan" as a character sub-span of + # the word "Japanese". Since our WordPiece tokenizer does not split + # "Japanese", we just use "Japanese" as the annotation. This is fairly rare + # in SQuAD, but does happen. 
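+  # The nested search below returns the first sub-span of
+  # [input_start, input_end] whose whitespace-joined tokens exactly equal the
+  # tokenized answer text; if no sub-span matches, the original span is kept.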
+ tok_answer_text = " ".join(tokenizer.tokenize(orig_answer_text)) + + for new_start in range(input_start, input_end + 1): + for new_end in range(input_end, new_start - 1, -1): + text_span = " ".join(doc_tokens[new_start:(new_end + 1)]) + if text_span == tok_answer_text: + return (new_start, new_end) + + return (input_start, input_end) + + +def _check_is_max_context(doc_spans, cur_span_index, position): + """Check if this is the 'max context' doc span for the token.""" + + # Because of the sliding window approach taken to scoring documents, a single + # token can appear in multiple documents. E.g. + # Doc: the man went to the store and bought a gallon of milk + # Span A: the man went to the + # Span B: to the store and bought + # Span C: and bought a gallon of + # ... + # + # Now the word 'bought' will have two scores from spans B and C. We only + # want to consider the score with "maximum context", which we define as + # the *minimum* of its left and right context (the *sum* of left and + # right context will always be the same, of course). + # + # In the example the maximum context for 'bought' would be span C since + # it has 1 left context and 3 right context, while span B has 4 left context + # and 0 right context. + best_score = None + best_span_index = None + for (span_index, doc_span) in enumerate(doc_spans): + end = doc_span.start + doc_span.length - 1 + if position < doc_span.start: + continue + if position > end: + continue + num_left_context = position - doc_span.start + num_right_context = end - position + score = min(num_left_context, num_right_context) + 0.01 * doc_span.length + if best_score is None or score > best_score: + best_score = score + best_span_index = span_index + + return cur_span_index == best_span_index + + +def create_model(bert_config, is_training, input_ids, input_mask, segment_ids, + use_one_hot_embeddings): + """Creates a classification model.""" + model = modeling.BertModel( + config=bert_config, + is_training=is_training, + input_ids=input_ids, + input_mask=input_mask, + token_type_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings) + + final_hidden = model.get_sequence_output() + + final_hidden_shape = modeling.get_shape_list(final_hidden, expected_rank=3) + batch_size = final_hidden_shape[0] + seq_length = final_hidden_shape[1] + hidden_size = final_hidden_shape[2] + + output_weights = tf.compat.v1.get_variable( + "cls/squad/output_weights", [2, hidden_size], + initializer=tf.compat.v1.truncated_normal_initializer(stddev=0.02)) + + output_bias = tf.compat.v1.get_variable( + "cls/squad/output_bias", [2], initializer=tf.compat.v1.zeros_initializer()) + + final_hidden_matrix = tf.reshape(final_hidden, + [batch_size * seq_length, hidden_size]) + logits = tf.matmul(final_hidden_matrix, output_weights, transpose_b=True) + logits = tf.nn.bias_add(logits, output_bias) + + logits = tf.reshape(logits, [batch_size, seq_length, 2]) + logits = tf.transpose(a=logits, perm=[2, 0, 1]) + + unstacked_logits = tf.unstack(logits, axis=0) + + (start_logits, end_logits) = (unstacked_logits[0], unstacked_logits[1]) + + return (start_logits, end_logits) + + +def model_fn_builder(bert_config, init_checkpoint, learning_rate, + num_train_steps, num_warmup_steps, use_tpu, + use_one_hot_embeddings): + """Returns `model_fn` closure for TPUEstimator.""" + + def model_fn(features, labels, mode, params): # pylint: disable=unused-argument + """The `model_fn` for TPUEstimator.""" + + tf.compat.v1.logging.info("*** Features ***") + for name in sorted(features.keys()): + 
tf.compat.v1.logging.info(" name = %s, shape = %s" % (name, features[name].shape)) + + unique_ids = features["unique_ids"] + input_ids = features["input_ids"] + input_mask = features["input_mask"] + segment_ids = features["segment_ids"] + + is_training = (mode == tf.estimator.ModeKeys.TRAIN) + + (start_logits, end_logits) = create_model( + bert_config=bert_config, + is_training=is_training, + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings) + + tvars = tf.compat.v1.trainable_variables() + + initialized_variable_names = {} + scaffold_fn = None + if init_checkpoint: + (assignment_map, initialized_variable_names + ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) + if use_tpu: + + def tpu_scaffold(): + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + return tf.compat.v1.train.Scaffold() + + scaffold_fn = tpu_scaffold + else: + tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map) + + tf.compat.v1.logging.info("**** Trainable Variables ****") + for var in tvars: + init_string = "" + if var.name in initialized_variable_names: + init_string = ", *INIT_FROM_CKPT*" + tf.compat.v1.logging.info(" name = %s, shape = %s%s", var.name, var.shape, + init_string) + + output_spec = None + if mode == tf.estimator.ModeKeys.TRAIN: + seq_length = modeling.get_shape_list(input_ids)[1] + + def compute_loss(logits, positions): + one_hot_positions = tf.one_hot( + positions, depth=seq_length, dtype=tf.float32) + log_probs = tf.nn.log_softmax(logits, axis=-1) + loss = -tf.reduce_mean( + input_tensor=tf.reduce_sum(input_tensor=one_hot_positions * log_probs, axis=-1)) + return loss + + start_positions = features["start_positions"] + end_positions = features["end_positions"] + + start_loss = compute_loss(start_logits, start_positions) + end_loss = compute_loss(end_logits, end_positions) + + total_loss = (start_loss + end_loss) / 2.0 + + train_op = optimization.create_optimizer( + total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu) + + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + train_op=train_op, + scaffold_fn=scaffold_fn) + elif mode == tf.estimator.ModeKeys.PREDICT: + predictions = { + "unique_ids": unique_ids, + "start_logits": start_logits, + "end_logits": end_logits, + } + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, predictions=predictions, scaffold_fn=scaffold_fn) + else: + raise ValueError( + "Only TRAIN and PREDICT modes are supported: %s" % (mode)) + + return output_spec + + return model_fn + + +def input_fn_builder(input_file, seq_length, is_training, drop_remainder): + """Creates an `input_fn` closure to be passed to TPUEstimator.""" + + name_to_features = { + "unique_ids": tf.io.FixedLenFeature([], tf.int64), + "input_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + "input_mask": tf.io.FixedLenFeature([seq_length], tf.int64), + "segment_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + } + + if is_training: + name_to_features["start_positions"] = tf.io.FixedLenFeature([], tf.int64) + name_to_features["end_positions"] = tf.io.FixedLenFeature([], tf.int64) + + def _decode_record(record, name_to_features): + """Decodes a record to a TensorFlow example.""" + example = tf.io.parse_single_example(serialized=record, features=name_to_features) + + # tf.Example only supports tf.int64, but the TPU only supports tf.int32. + # So cast all int64 to int32. 
+ for name in list(example.keys()): + t = example[name] + if t.dtype == tf.int64: + t = tf.cast(t, dtype=tf.int32) + example[name] = t + + return example + + def input_fn(params): + """The actual input function.""" + batch_size = params["batch_size"] + + # For training, we want a lot of parallel reading and shuffling. + # For eval, we want no shuffling and parallel reading doesn't matter. + d = tf.data.TFRecordDataset(input_file) + if is_training: + d = d.repeat() + d = d.shuffle(buffer_size=100) + + d = d.apply( + tf.data.experimental.map_and_batch( + lambda record: _decode_record(record, name_to_features), + batch_size=batch_size, + drop_remainder=drop_remainder)) + + return d + + return input_fn + + +RawResult = collections.namedtuple("RawResult", + ["unique_id", "start_logits", "end_logits"]) + + +def write_predictions(all_examples, all_features, all_results, n_best_size, + max_answer_length, do_lower_case, output_prediction_file, + output_nbest_file, output_null_log_odds_file): + """Write final predictions to the json file and log-odds of null if needed.""" + tf.compat.v1.logging.info("Writing predictions to: %s" % (output_prediction_file)) + tf.compat.v1.logging.info("Writing nbest to: %s" % (output_nbest_file)) + + example_index_to_features = collections.defaultdict(list) + for feature in all_features: + example_index_to_features[feature.example_index].append(feature) + + unique_id_to_result = {} + for result in all_results: + unique_id_to_result[result.unique_id] = result + + _PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name + "PrelimPrediction", + ["feature_index", "start_index", "end_index", "start_logit", "end_logit"]) + + all_predictions = collections.OrderedDict() + all_nbest_json = collections.OrderedDict() + scores_diff_json = collections.OrderedDict() + + for (example_index, example) in enumerate(all_examples): + features = example_index_to_features[example_index] + + prelim_predictions = [] + # keep track of the minimum score of null start+end of position 0 + score_null = 1000000 # large and positive + min_null_feature_index = 0 # the paragraph slice with min mull score + null_start_logit = 0 # the start logit at the slice with min null score + null_end_logit = 0 # the end logit at the slice with min null score + for (feature_index, feature) in enumerate(features): + result = unique_id_to_result[feature.unique_id] + start_indexes = _get_best_indexes(result.start_logits, n_best_size) + end_indexes = _get_best_indexes(result.end_logits, n_best_size) + # if we could have irrelevant answers, get the min score of irrelevant + if FLAGS.version_2_with_negative: + feature_null_score = result.start_logits[0] + result.end_logits[0] + if feature_null_score < score_null: + score_null = feature_null_score + min_null_feature_index = feature_index + null_start_logit = result.start_logits[0] + null_end_logit = result.end_logits[0] + for start_index in start_indexes: + for end_index in end_indexes: + # We could hypothetically create invalid predictions, e.g., predict + # that the start of the span is in the question. We throw out all + # invalid predictions. 
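+          # The filters below drop: indexes past the feature's tokens, indexes
+          # that map to no original document token (query / special tokens),
+          # start tokens outside their "max context" span, ends before starts,
+          # and answers longer than max_answer_length.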
+ if start_index >= len(feature.tokens): + continue + if end_index >= len(feature.tokens): + continue + if start_index not in feature.token_to_orig_map: + continue + if end_index not in feature.token_to_orig_map: + continue + if not feature.token_is_max_context.get(start_index, False): + continue + if end_index < start_index: + continue + length = end_index - start_index + 1 + if length > max_answer_length: + continue + prelim_predictions.append( + _PrelimPrediction( + feature_index=feature_index, + start_index=start_index, + end_index=end_index, + start_logit=result.start_logits[start_index], + end_logit=result.end_logits[end_index])) + + if FLAGS.version_2_with_negative: + prelim_predictions.append( + _PrelimPrediction( + feature_index=min_null_feature_index, + start_index=0, + end_index=0, + start_logit=null_start_logit, + end_logit=null_end_logit)) + prelim_predictions = sorted( + prelim_predictions, + key=lambda x: (x.start_logit + x.end_logit), + reverse=True) + + _NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name + "NbestPrediction", ["text", "start_logit", "end_logit"]) + + seen_predictions = {} + nbest = [] + for pred in prelim_predictions: + if len(nbest) >= n_best_size: + break + feature = features[pred.feature_index] + if pred.start_index > 0: # this is a non-null prediction + tok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)] + orig_doc_start = feature.token_to_orig_map[pred.start_index] + orig_doc_end = feature.token_to_orig_map[pred.end_index] + orig_tokens = example.doc_tokens[orig_doc_start:(orig_doc_end + 1)] + tok_text = " ".join(tok_tokens) + + # De-tokenize WordPieces that have been split off. + tok_text = tok_text.replace(" ##", "") + tok_text = tok_text.replace("##", "") + + # Clean whitespace + tok_text = tok_text.strip() + tok_text = " ".join(tok_text.split()) + orig_text = " ".join(orig_tokens) + + final_text = get_final_text(tok_text, orig_text, do_lower_case) + if final_text in seen_predictions: + continue + + seen_predictions[final_text] = True + else: + final_text = "" + seen_predictions[final_text] = True + + nbest.append( + _NbestPrediction( + text=final_text, + start_logit=pred.start_logit, + end_logit=pred.end_logit)) + + # if we didn't inlude the empty option in the n-best, inlcude it + if FLAGS.version_2_with_negative: + if "" not in seen_predictions: + nbest.append( + _NbestPrediction( + text="", start_logit=null_start_logit, + end_logit=null_end_logit)) + # In very rare edge cases we could have no valid predictions. So we + # just create a nonce prediction in this case to avoid failure. 
+ if not nbest: + nbest.append( + _NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0)) + + assert len(nbest) >= 1 + + total_scores = [] + best_non_null_entry = None + for entry in nbest: + total_scores.append(entry.start_logit + entry.end_logit) + if not best_non_null_entry: + if entry.text: + best_non_null_entry = entry + + probs = _compute_softmax(total_scores) + + nbest_json = [] + for (i, entry) in enumerate(nbest): + output = collections.OrderedDict() + output["text"] = entry.text + output["probability"] = probs[i] + output["start_logit"] = entry.start_logit + output["end_logit"] = entry.end_logit + nbest_json.append(output) + + assert len(nbest_json) >= 1 + + if not FLAGS.version_2_with_negative: + all_predictions[example.qas_id] = nbest_json[0]["text"] + else: + # predict "" iff the null score - the score of best non-null > threshold + score_diff = score_null - best_non_null_entry.start_logit - ( + best_non_null_entry.end_logit) + scores_diff_json[example.qas_id] = score_diff + if score_diff > FLAGS.null_score_diff_threshold: + all_predictions[example.qas_id] = "" + else: + all_predictions[example.qas_id] = best_non_null_entry.text + + all_nbest_json[example.qas_id] = nbest_json + + with tf.io.gfile.GFile(output_prediction_file, "w") as writer: + writer.write(json.dumps(all_predictions, indent=4) + "\n") + + with tf.io.gfile.GFile(output_nbest_file, "w") as writer: + writer.write(json.dumps(all_nbest_json, indent=4) + "\n") + + if FLAGS.version_2_with_negative: + with tf.io.gfile.GFile(output_null_log_odds_file, "w") as writer: + writer.write(json.dumps(scores_diff_json, indent=4) + "\n") + + +def get_final_text(pred_text, orig_text, do_lower_case): + """Project the tokenized prediction back to the original text.""" + + # When we created the data, we kept track of the alignment between original + # (whitespace tokenized) tokens and our WordPiece tokenized tokens. So + # now `orig_text` contains the span of our original text corresponding to the + # span that we predicted. + # + # However, `orig_text` may contain extra characters that we don't want in + # our prediction. + # + # For example, let's say: + # pred_text = steve smith + # orig_text = Steve Smith's + # + # We don't want to return `orig_text` because it contains the extra "'s". + # + # We don't want to return `pred_text` because it's already been normalized + # (the SQuAD eval script also does punctuation stripping/lower casing but + # our tokenizer does additional normalization like stripping accent + # characters). + # + # What we really want to return is "Steve Smith". + # + # Therefore, we have to apply a semi-complicated alignment heruistic between + # `pred_text` and `orig_text` to get a character-to-charcter alignment. This + # can fail in certain cases in which case we just return `orig_text`. + + def _strip_spaces(text): + ns_chars = [] + ns_to_s_map = collections.OrderedDict() + for (i, c) in enumerate(text): + if c == " ": + continue + ns_to_s_map[len(ns_chars)] = i + ns_chars.append(c) + ns_text = "".join(ns_chars) + return (ns_text, ns_to_s_map) + + # We first tokenize `orig_text`, strip whitespace from the result + # and `pred_text`, and check if they are the same length. If they are + # NOT the same length, the heuristic has failed. If they are the same + # length, we assume the characters are one-to-one aligned. 
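+  # For example, _strip_spaces("a b c") returns ("abc", {0: 0, 1: 2, 2: 4}),
+  # mapping each kept character back to its index in the spaced text.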
+ tokenizer = tokenization.BasicTokenizer(do_lower_case=do_lower_case) + + tok_text = " ".join(tokenizer.tokenize(orig_text)) + + start_position = tok_text.find(pred_text) + if start_position == -1: + if FLAGS.verbose_logging: + tf.compat.v1.logging.info( + "Unable to find text: '%s' in '%s'" % (pred_text, orig_text)) + return orig_text + end_position = start_position + len(pred_text) - 1 + + (orig_ns_text, orig_ns_to_s_map) = _strip_spaces(orig_text) + (tok_ns_text, tok_ns_to_s_map) = _strip_spaces(tok_text) + + if len(orig_ns_text) != len(tok_ns_text): + if FLAGS.verbose_logging: + tf.compat.v1.logging.info("Length not equal after stripping spaces: '%s' vs '%s'", + orig_ns_text, tok_ns_text) + return orig_text + + # We then project the characters in `pred_text` back to `orig_text` using + # the character-to-character alignment. + tok_s_to_ns_map = {} + for (i, tok_index) in six.iteritems(tok_ns_to_s_map): + tok_s_to_ns_map[tok_index] = i + + orig_start_position = None + if start_position in tok_s_to_ns_map: + ns_start_position = tok_s_to_ns_map[start_position] + if ns_start_position in orig_ns_to_s_map: + orig_start_position = orig_ns_to_s_map[ns_start_position] + + if orig_start_position is None: + if FLAGS.verbose_logging: + tf.compat.v1.logging.info("Couldn't map start position") + return orig_text + + orig_end_position = None + if end_position in tok_s_to_ns_map: + ns_end_position = tok_s_to_ns_map[end_position] + if ns_end_position in orig_ns_to_s_map: + orig_end_position = orig_ns_to_s_map[ns_end_position] + + if orig_end_position is None: + if FLAGS.verbose_logging: + tf.compat.v1.logging.info("Couldn't map end position") + return orig_text + + output_text = orig_text[orig_start_position:(orig_end_position + 1)] + return output_text + + +def _get_best_indexes(logits, n_best_size): + """Get the n-best logits from a list.""" + index_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True) + + best_indexes = [] + for i in range(len(index_and_score)): + if i >= n_best_size: + break + best_indexes.append(index_and_score[i][0]) + return best_indexes + + +def _compute_softmax(scores): + """Compute softmax probability over raw logits.""" + if not scores: + return [] + + max_score = None + for score in scores: + if max_score is None or score > max_score: + max_score = score + + exp_scores = [] + total_sum = 0.0 + for score in scores: + x = math.exp(score - max_score) + exp_scores.append(x) + total_sum += x + + probs = [] + for score in exp_scores: + probs.append(score / total_sum) + return probs + + +class FeatureWriter(object): + """Writes InputFeature to TF example file.""" + + def __init__(self, filename, is_training): + self.filename = filename + self.is_training = is_training + self.num_features = 0 + self._writer = tf.io.TFRecordWriter(filename) + + def process_feature(self, feature): + """Write a InputFeature to the TFRecordWriter as a tf.train.Example.""" + self.num_features += 1 + + def create_int_feature(values): + feature = tf.train.Feature( + int64_list=tf.train.Int64List(value=list(values))) + return feature + + features = collections.OrderedDict() + features["unique_ids"] = create_int_feature([feature.unique_id]) + features["input_ids"] = create_int_feature(feature.input_ids) + features["input_mask"] = create_int_feature(feature.input_mask) + features["segment_ids"] = create_int_feature(feature.segment_ids) + + if self.is_training: + features["start_positions"] = create_int_feature([feature.start_position]) + features["end_positions"] = 
create_int_feature([feature.end_position]) + impossible = 0 + if feature.is_impossible: + impossible = 1 + features["is_impossible"] = create_int_feature([impossible]) + + tf_example = tf.train.Example(features=tf.train.Features(feature=features)) + self._writer.write(tf_example.SerializeToString()) + + def close(self): + self._writer.close() + + +def validate_flags_or_throw(bert_config): + """Validate the input FLAGS or throw an exception.""" + tokenization.validate_case_matches_checkpoint(FLAGS.do_lower_case, + FLAGS.init_checkpoint) + + if not FLAGS.do_train and not FLAGS.do_predict: + raise ValueError("At least one of `do_train` or `do_predict` must be True.") + + if FLAGS.do_train: + if not FLAGS.train_file: + raise ValueError( + "If `do_train` is True, then `train_file` must be specified.") + if FLAGS.do_predict: + if not FLAGS.predict_file: + raise ValueError( + "If `do_predict` is True, then `predict_file` must be specified.") + + if FLAGS.max_seq_length > bert_config.max_position_embeddings: + raise ValueError( + "Cannot use sequence length %d because the BERT model " + "was only trained up to sequence length %d" % + (FLAGS.max_seq_length, bert_config.max_position_embeddings)) + + if FLAGS.max_seq_length <= FLAGS.max_query_length + 3: + raise ValueError( + "The max_seq_length (%d) must be greater than max_query_length " + "(%d) + 3" % (FLAGS.max_seq_length, FLAGS.max_query_length)) + + +def main(_): + + print("# ------------------------------ #") + print("# #") + print("# ------------- #") + print("# -- START -- #") + print("# ------------- #") + print("# #") + print("# ------------------------------ #") + + # --- FOR CALCULATORS + exp = ExpParams(FLAGS) + # ------------------- + + + + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + + bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file) + + validate_flags_or_throw(bert_config) + + tf.io.gfile.makedirs(FLAGS.output_dir) + + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + + tpu_cluster_resolver = None + if FLAGS.use_tpu and FLAGS.tpu_name: + tpu_cluster_resolver = slim.cluster_resolver.TPUClusterResolver( + FLAGS.tpu_name, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project) + + # is_per_host = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V2 + is_per_host = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V2 + run_config = tf.compat.v1.estimator.tpu.RunConfig( + cluster=tpu_cluster_resolver, + master=FLAGS.master, + model_dir=FLAGS.output_dir, + save_checkpoints_steps=FLAGS.save_checkpoints_steps, + tpu_config=tf.compat.v1.estimator.tpu.TPUConfig( + iterations_per_loop=FLAGS.iterations_per_loop, + num_shards=FLAGS.num_tpu_cores, + per_host_input_for_training=is_per_host)) + + train_examples = None + num_train_steps = None + num_warmup_steps = None + if FLAGS.do_train: + train_examples = read_squad_examples( + input_file=FLAGS.train_file, is_training=True) + num_train_steps = int( + len(train_examples) / FLAGS.train_batch_size * FLAGS.num_train_epochs) + num_warmup_steps = int(num_train_steps * FLAGS.warmup_proportion) + + # Pre-shuffle the input to avoid having to make a very large shuffle + # buffer in in the `input_fn`. 
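+    # A fixed seed keeps the pre-shuffle order reproducible across runs.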
+ rng = random.Random(12345) + rng.shuffle(train_examples) + + model_fn = model_fn_builder( + bert_config=bert_config, + init_checkpoint=FLAGS.init_checkpoint, + learning_rate=FLAGS.learning_rate, + num_train_steps=num_train_steps, + num_warmup_steps=num_warmup_steps, + use_tpu=FLAGS.use_tpu, + use_one_hot_embeddings=FLAGS.use_tpu) + + # If TPU is not available, this will fall back to normal Estimator on CPU + # or GPU. + estimator = tf.compat.v1.estimator.tpu.TPUEstimator( + use_tpu=FLAGS.use_tpu, + model_fn=model_fn, + config=run_config, + train_batch_size=FLAGS.train_batch_size, + predict_batch_size=FLAGS.predict_batch_size) + + + + + + + + # # --- FOR CALCULATORS + # tracker = prepare_calculator(exp) + # if exp.name_calc == 'green_algorithms': + # cpu_util = [] + # gpu_util = [] + # ram_util = [] + # start_calculators(exp, tracker) + # t0 = time.time() + # # ------------------- + + + + ##################### + ##### Training ##### + ##################### + + if (exp.ml == "training") and exp.name_calc != 'flops': + + print("# ---------------------- #") + print("# --- training start --- #") + print("# ---------------------- #") + + # Delete previously saved model + tmp_model_foder_name = os.path.join(exp.path_logs_and_results, exp.name+"_model") + for f in os.listdir(tmp_model_foder_name): + os.remove(os.path.join(tmp_model_foder_name, f)) + + + # We write to a temporary file to avoid storing very large constant tensors + # in memory. + train_writer = FeatureWriter( + filename=os.path.join(FLAGS.output_dir, "train.tf_record"), + is_training=True) + convert_examples_to_features( + examples=train_examples, + tokenizer=tokenizer, + max_seq_length=FLAGS.max_seq_length, + doc_stride=FLAGS.doc_stride, + max_query_length=FLAGS.max_query_length, + is_training=True, + output_fn=train_writer.process_feature) + train_writer.close() + + tf.compat.v1.logging.info("***** Running training *****") + tf.compat.v1.logging.info(" Num orig examples = %d", len(train_examples)) + tf.compat.v1.logging.info(" Num split examples = %d", train_writer.num_features) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.train_batch_size) + tf.compat.v1.logging.info(" Num steps = %d", num_train_steps) + del train_examples + + train_input_fn = input_fn_builder( + input_file=train_writer.filename, + seq_length=FLAGS.max_seq_length, + is_training=True, + drop_remainder=True) + + + # --- FOR CALCULATORS + tracker = prepare_calculator(exp) + if exp.name_calc == 'green_algorithms': + cpu_util = [] + gpu_util = [] + ram_util = [] + start_calculators(exp, tracker) + t0 = time.time() + # ------------------- + + + if (exp.ml == "training") and exp.name_calc != 'flops': + + if FLAGS.calculator == "carbon_tracker": + estimator.train( + input_fn=train_input_fn, + epochs=FLAGS.num_train_epochs, + max_steps=num_train_steps, + tracker=tracker + ) + else: + estimator.train( + input_fn=train_input_fn, + epochs=FLAGS.num_train_epochs, + max_steps=num_train_steps, + ) + + print("# ---------------------- #") + print("# --- training end --- #") + print("# ---------------------- #") + + + + + ##################### + ##### Inference ##### + ##################### + + elif (exp.ml == "inference") and not exp.name_calc == 'flops': + + print("# ----------------------- #") + print("# --- inference start --- #") + print("# ----------------------- #") + + eval_examples = read_squad_examples( + input_file=FLAGS.predict_file, is_training=False) + + eval_writer = FeatureWriter( + filename=os.path.join(FLAGS.output_dir, "eval.tf_record"), + 
is_training=False) + eval_features = [] + + def append_feature(feature): + eval_features.append(feature) + eval_writer.process_feature(feature) + + convert_examples_to_features( + examples=eval_examples, + tokenizer=tokenizer, + max_seq_length=FLAGS.max_seq_length, + doc_stride=FLAGS.doc_stride, + max_query_length=FLAGS.max_query_length, + is_training=False, + output_fn=append_feature) + eval_writer.close() + + tf.compat.v1.logging.info("***** Running predictions *****") + tf.compat.v1.logging.info(" Num orig examples = %d", len(eval_examples)) + tf.compat.v1.logging.info(" Num split examples = %d", len(eval_features)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.predict_batch_size) + + all_results = [] + + predict_input_fn = input_fn_builder( + input_file=eval_writer.filename, + seq_length=FLAGS.max_seq_length, + is_training=False, + drop_remainder=False) + + # If running eval on the TPU, you will need to specify the number of + # steps. + all_results = [] + + for result in estimator.predict( + predict_input_fn, yield_single_examples=True): + + if exp.name == "SQUAD-extracted": + tf.compat.v1.logging.info("Processing example: %d" % (len(all_results))) + elif len(all_results) % 1000 == 0: + tf.compat.v1.logging.info("Processing example: %d" % (len(all_results))) + unique_id = int(result["unique_ids"]) + start_logits = [float(x) for x in result["start_logits"].flat] + end_logits = [float(x) for x in result["end_logits"].flat] + all_results.append( + RawResult( + unique_id=unique_id, + start_logits=start_logits, + end_logits=end_logits)) + + output_prediction_file = os.path.join(FLAGS.output_dir, "predictions.json") + output_nbest_file = os.path.join(FLAGS.output_dir, "nbest_predictions.json") + output_null_log_odds_file = os.path.join(FLAGS.output_dir, "null_odds.json") + + write_predictions(eval_examples, eval_features, all_results, + FLAGS.n_best_size, FLAGS.max_answer_length, + FLAGS.do_lower_case, output_prediction_file, + output_nbest_file, output_null_log_odds_file) + + print("# ----------------------- #") + print("# --- inference end --- #") + print("# ----------------------- #") + + + # --- FOR CALCULATORS + + tfinal= time.time() + duration = tfinal - t0 + stop_calculators(exp, tracker) + + train_examples = read_squad_examples( + input_file=FLAGS.train_file, is_training=False) + nb_examples = len(train_examples) + graph_folder = os.path.join("models", "bert_saved_model") + Ec_kWh = flops_method_tensorflow(exp, nb_examples, graph_folder) + + + # Saving the data: + if exp.name_calc == 'code_carbon': + save_cc(exp, FLAGS, duration) + elif exp.name_calc == 'carbon_tracker': + save_ct(exp, FLAGS, duration) + elif exp.name_calc == 'eco2ai': + save_eco2ai(exp, FLAGS, duration) + elif exp.name_calc == 'green_algorithms': + if exp.automated and exp.parallel: + stop_UTIL(exp, t0, tfinal) + cpu_util, gpu_util, ram_util = mean_parallel_UTIL(exp) + save_ga(exp, FLAGS, duration, exp.automated, cpu_util, gpu_util, ram_util) + elif exp.name_calc == 'tapo': + stop_TAPO(exp, t0, tfinal) + save_tapo(exp, FLAGS) + elif exp.name_calc =='flops': + save_FLOPS(exp, FLAGS, Ec_kWh) + else: # no calculator + save_nocalc(exp, FLAGS, duration) + # ---------------------- + +if __name__ == "__main__": + flags.mark_flag_as_required("vocab_file") + flags.mark_flag_as_required("bert_config_file") + flags.mark_flag_as_required("output_dir") + tf.compat.v1.app.run() diff --git a/exp-4-bert-squad/tf_updated_files/estimator.py b/exp-4-bert-squad/tf_updated_files/estimator.py new file mode 100644 index 
0000000..e9051f7 --- /dev/null +++ b/exp-4-bert-squad/tf_updated_files/estimator.py @@ -0,0 +1,2468 @@ +# Copyright 2016 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Base Estimator class.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import copy +import os +import tempfile + +import numpy as np +import six +import tensorflow as tf +from carbontracker.tracker import CarbonTracker +from carbontracker import parser as CTparser +from google.protobuf import message +from tensorflow.core.framework import summary_pb2 +from tensorflow.python.checkpoint import checkpoint as trackable_util +from tensorflow.python.checkpoint import checkpoint_management +from tensorflow.python.checkpoint import graph_view +from tensorflow.python.distribute import estimator_training as distribute_coordinator_training +from tensorflow.python.eager import context +from tensorflow.python.eager import monitoring +from tensorflow.python.framework import ops +from tensorflow.python.platform import tf_logging as logging +from tensorflow.python.profiler import trace +from tensorflow.python.saved_model import utils_impl as saved_model_utils +from tensorflow.python.summary import summary +from tensorflow.python.training import basic_session_run_hooks +from tensorflow.python.training import device_setter +from tensorflow.python.training import evaluation +from tensorflow.python.training import training +from tensorflow.python.training import training_util +from tensorflow.python.util import compat_internal +from tensorflow.python.util import deprecation +from tensorflow.python.util import function_utils +from tensorflow.python.util import tf_contextlib +from tensorflow.python.util.tf_export import estimator_export +from tensorflow.tools.docs import doc_controls +from tensorflow_estimator.python.estimator import model_fn as model_fn_lib +from tensorflow_estimator.python.estimator import run_config +from tensorflow_estimator.python.estimator import util as estimator_util +from tensorflow_estimator.python.estimator.export import export_lib +from tensorflow_estimator.python.estimator.mode_keys import ModeKeys + +_VALID_MODEL_FN_ARGS = set( + ['features', 'labels', 'mode', 'params', 'self', 'config']) +_estimator_api_gauge = monitoring.BoolGauge('/tensorflow/api/estimator', + 'estimator api usage', 'method') + +_canned_estimator_api_gauge = monitoring.StringGauge( + '/tensorflow/api/estimator/canned_estimator', + 'Gauge to track the type of canned estimator used', 'ClassType') + +# epochs = os.getenv('NUMBER_OF_EPOCHS', 2) + +# output_file = os.getenv('OUTPUT_ESTIMATOR') +# tracker = CarbonTracker(epochs=epochs, update_interval = 0.01, log_dir = output_file, monitor_epochs = -1, epochs_before_pred = 0, decimal_precision = 10) + + +@estimator_export(v1=['estimator.Estimator']) +@doc_controls.inheritable_header("""\ 
+ Warning: Estimators are not recommended for new code. Estimators run + `v1.Session`-style code which is more difficult to write correctly, and + can behave unexpectedly, especially when combined with TF 2 code. Estimators + do fall under our + [compatibility guarantees](https://tensorflow.org/guide/versions), but will + receive no fixes other than security vulnerabilities. See the + [migration guide](https://tensorflow.org/guide/migrate) for details. + """) +class Estimator(object): + """Estimator class to train and evaluate TensorFlow models. + + The `Estimator` object wraps a model which is specified by a `model_fn`, + which, given inputs and a number of other parameters, returns the ops + necessary to perform training, evaluation, or predictions. + + All outputs (checkpoints, event files, etc.) are written to `model_dir`, or a + subdirectory thereof. If `model_dir` is not set, a temporary directory is + used. + + The `config` argument can be passed `tf.estimator.RunConfig` object containing + information about the execution environment. It is passed on to the + `model_fn`, if the `model_fn` has a parameter named "config" (and input + functions in the same manner). If the `config` parameter is not passed, it is + instantiated by the `Estimator`. Not passing config means that defaults useful + for local execution are used. `Estimator` makes config available to the model + (for instance, to allow specialization based on the number of workers + available), and also uses some of its fields to control internals, especially + regarding checkpointing. + + The `params` argument contains hyperparameters. It is passed to the + `model_fn`, if the `model_fn` has a parameter named "params", and to the input + functions in the same manner. `Estimator` only passes params along, it does + not inspect it. The structure of `params` is therefore entirely up to the + developer. + + None of `Estimator`'s methods can be overridden in subclasses (its + constructor enforces this). Subclasses should use `model_fn` to configure + the base class, and may add methods implementing specialized functionality. + + See [estimators](https://tensorflow.org/guide/estimator) for more + information. + + To warm-start an `Estimator`: + + ```python + estimator = tf.estimator.DNNClassifier( + feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb], + hidden_units=[1024, 512, 256], + warm_start_from="/path/to/checkpoint/dir") + ``` + + For more details on warm-start configuration, see + `tf.estimator.WarmStartSettings`. + + @compatibility(eager) + Calling methods of `Estimator` will work while eager execution is enabled. + However, the `model_fn` and `input_fn` is not executed eagerly, `Estimator` + will switch to graph mode before calling all user-provided functions (incl. + hooks), so their code has to be compatible with graph mode execution. Note + that `input_fn` code using `tf.data` generally works in both graph and eager + modes. + @end_compatibility + """ + + def __init__(self, + model_fn, + model_dir=None, + config=None, + params=None, + warm_start_from=None): + """Constructs an `Estimator` instance. + + + + Args: + model_fn: Model function. Follows the signature: + * `features` -- This is the first item returned from the `input_fn` + passed to `train`, `evaluate`, and `predict`. This should be a + single `tf.Tensor` or `dict` of same. + * `labels` -- This is the second item returned from the `input_fn` + passed to `train`, `evaluate`, and `predict`. 
This should be a + single `tf.Tensor` or `dict` of same (for multi-head models). If + mode is `tf.estimator.ModeKeys.PREDICT`, `labels=None` will be + passed. If the `model_fn`'s signature does not accept `mode`, the + `model_fn` must still be able to handle `labels=None`. + * `mode` -- Optional. Specifies if this is training, evaluation or + prediction. See `tf.estimator.ModeKeys`. + `params` -- Optional `dict` of hyperparameters. Will receive what is + passed to Estimator in `params` parameter. This allows to configure + Estimators from hyper parameter tuning. + * `config` -- Optional `estimator.RunConfig` object. Will receive what + is passed to Estimator as its `config` parameter, or a default + value. Allows setting up things in your `model_fn` based on + configuration such as `num_ps_replicas`, or `model_dir`. + * Returns -- `tf.estimator.EstimatorSpec` + model_dir: Directory to save model parameters, graph and etc. This can + also be used to load checkpoints from the directory into an estimator to + continue training a previously saved model. If `PathLike` object, the + path will be resolved. If `None`, the model_dir in `config` will be used + if set. If both are set, they must be same. If both are `None`, a + temporary directory will be used. + config: `estimator.RunConfig` configuration object. + params: `dict` of hyper parameters that will be passed into `model_fn`. + Keys are names of parameters, values are basic python types. + warm_start_from: Optional string filepath to a checkpoint or SavedModel to + warm-start from, or a `tf.estimator.WarmStartSettings` object to fully + configure warm-starting. If None, only TRAINABLE variables are + warm-started. If the string filepath is provided instead of a + `tf.estimator.WarmStartSettings`, then all variables are warm-started, + and it is assumed that vocabularies and `tf.Tensor` names are unchanged. + + Raises: + ValueError: parameters of `model_fn` don't match `params`. + ValueError: if this is called via a subclass and if that class overrides + a member of `Estimator`. + """ + _estimator_api_gauge.get_cell('init').set(True) + # We do not endorse Estimator child classes to override methods in + # Estimator, other than a select few. You're on your own if you cleverly + # override the method "_assert_members_are_not_overridden". + self.__class__._assert_members_are_not_overridden(self) # pylint: disable=protected-access + + self._config = maybe_overwrite_model_dir_and_session_config( + config, model_dir) + + # The distribute field contains an instance of tf.distribute.Strategy. + self._train_distribution = self._config.train_distribute + self._eval_distribution = self._config.eval_distribute + # Model directory. 
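+
+ # Illustrative sketch (an assumed calling pattern, mirroring the
+ # commented-out CarbonTracker block near the imports): the experiment's
+ # run script is expected to build a tracker and hand it to `train`, e.g.
+ #
+ # tracker = CarbonTracker(epochs=num_train_epochs, update_interval=0.01,
+ #                         log_dir=output_dir, monitor_epochs=-1,
+ #                         epochs_before_pred=0, decimal_precision=10)
+ # estimator.train(input_fn=train_input_fn, epochs=num_train_epochs,
+ #                 max_steps=num_train_steps, tracker=tracker)
+ #
+ # `num_train_epochs`, `num_train_steps`, `train_input_fn` and `output_dir`
+ # are placeholder names from the calling script, not defined here.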
+ self._model_dir = self._config.model_dir + self._session_config = self._config.session_config + tf.compat.v1.logging.info('Using config: %s', str(vars(self._config))) + + self._device_fn = ( + self._config.device_fn or _get_replica_device_setter(self._config)) + + if model_fn is None: + raise ValueError('model_fn must be provided to Estimator.') + model_fn_lib.verify_model_fn_args(model_fn, params) + self._model_fn = model_fn + self._params = copy.deepcopy(params or {}) + + # pylint: disable=protected-access + self._warm_start_settings = _get_default_warm_start_settings( + warm_start_from) + # pylint: enable=protected-access + + @property + def model_dir(self): + return self._model_dir + + @property + def config(self): + return copy.deepcopy(self._config) + + @property + def params(self): + return copy.deepcopy(self._params) + + @property + def model_fn(self): + """Returns the `model_fn` which is bound to `self.params`. + + Returns: + The `model_fn` with following signature: + `def model_fn(features, labels, mode, config)` + """ + + def public_model_fn(features, labels, mode, config): + return self._call_model_fn(features, labels, mode, config) + + return public_model_fn + + # TODO(ispir): support a list of names + def get_variable_value(self, name): + """Returns value of the variable given by name. + + Args: + name: string or a list of string, name of the tensor. + + Returns: + Numpy array - value of the tensor. + + Raises: + ValueError: If the `Estimator` has not produced a checkpoint yet. + """ + _check_checkpoint_available(self.model_dir) + with context.graph_mode(): + return tf.train.load_variable(self.model_dir, name) + + def get_variable_names(self): + """Returns list of all variable names in this model. + + Returns: + List of names. + + Raises: + ValueError: If the `Estimator` has not produced a checkpoint yet. + """ + _check_checkpoint_available(self.model_dir) + with context.graph_mode(): + return [name for name, _ in tf.train.list_variables(self.model_dir)] + + def latest_checkpoint(self): + """Finds the filename of the latest saved checkpoint file in `model_dir`. + + Returns: + The full path to the latest checkpoint or `None` if no checkpoint was + found. + """ + with context.graph_mode(): + return checkpoint_management.latest_checkpoint(self.model_dir) + + def train(self, + input_fn, + epochs, + hooks=None, + steps=None, + max_steps=None, + saving_listeners=None, + tracker=None): + """Trains a model given training data `input_fn`. + + Args: + input_fn: A function that provides input data for training as minibatches. + See [Premade Estimators]( + https://tensorflow.org/guide/premade_estimators#create_input_functions) + for more information. The function should construct and return one of + the following: + * A `tf.data.Dataset` object: Outputs of `Dataset` object must be a + tuple `(features, labels)` with same constraints as below. + * A tuple `(features, labels)`: Where `features` is a `tf.Tensor` or a + dictionary of string feature name to `Tensor` and `labels` is a + `Tensor` or a dictionary of string label name to `Tensor`. Both + `features` and `labels` are consumed by `model_fn`. They should + satisfy the expectation of `model_fn` from inputs. + hooks: List of `tf.train.SessionRunHook` subclass instances. Used for + callbacks inside the training loop. + steps: Number of steps for which to train the model. If `None`, train + forever or train until `input_fn` generates the `tf.errors.OutOfRange` + error or `StopIteration` exception. `steps` works incrementally. 
If you + call two times `train(steps=10)` then training occurs in total 20 steps. + If `OutOfRange` or `StopIteration` occurs in the middle, training stops + before 20 steps. If you don't want to have incremental behavior please + set `max_steps` instead. If set, `max_steps` must be `None`. + max_steps: Number of total steps for which to train model. If `None`, + train forever or train until `input_fn` generates the + `tf.errors.OutOfRange` error or `StopIteration` exception. If set, + `steps` must be `None`. If `OutOfRange` or `StopIteration` occurs in the + middle, training stops before `max_steps` steps. Two calls to + `train(steps=100)` means 200 training iterations. On the other hand, two + calls to `train(max_steps=100)` means that the second call will not do + any iteration since first call did all 100 steps. + saving_listeners: list of `CheckpointSaverListener` objects. Used for + callbacks that run immediately before or after checkpoint savings. + epochs: number of training epochs. + tracker: Carbon tracker. + + Returns: + `self`, for chaining. + + Raises: + ValueError: If both `steps` and `max_steps` are not `None`. + ValueError: If either `steps` or `max_steps <= 0`. + """ + global training_steps + training_steps = max_steps + + global training_epochs + training_epochs = epochs + + global carbon_tracker + carbon_tracker = tracker + + + _estimator_api_gauge.get_cell('train').set(True) + if self.config.task_type in (run_config.TaskType.EVALUATOR, + run_config.TaskType.PS): + raise ValueError( + 'Train has been called wrong configuration. Please use ' + 'tf.estimator.train_and_evaluate which calls proper API according ' + 'to given configuration. Current configuration: {}.'.format( + self.config)) + + with context.graph_mode(): + if (steps is not None) and (max_steps is not None): + raise ValueError('Can not provide both steps and max_steps.') + if steps is not None and steps <= 0: + raise ValueError('Must specify steps > 0, given: {}'.format(steps)) + if max_steps is not None and max_steps <= 0: + raise ValueError( + 'Must specify max_steps > 0, given: {}'.format(max_steps)) + + if max_steps is not None: + start_step = _load_global_step_from_checkpoint_dir(self._model_dir) + if max_steps <= start_step: + logging.info('Skipping training since max_steps has already saved.') + return self + + hooks = _check_hooks_type(hooks) + logging.info('HOOKS: \n') + logging.info(hooks) + hooks.extend(self._convert_train_steps_to_hooks(steps, max_steps)) + + saving_listeners = _check_listeners_type(saving_listeners) + loss = self._train_model(input_fn, hooks, saving_listeners) + logging.info('Loss for final step: %s.', loss) + return self + + def _convert_train_steps_to_hooks(self, steps, max_steps): + """Create hooks to run correct number of steps in training. + + Args: + steps: number of steps to run during training. + max_steps: maximum number of steps to be run during training. It'll be the + maximum number of steps the model will train to after restoring from + checkpoint even across multiple estimator.train calls. + + Returns: + List of hooks to be passed to the estimator. 
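+
+ For illustration: with no distribution strategy (or `steps_per_run == 1`),
+ `steps=1000, max_steps=None` presumably yields
+ `[tf.compat.v1.train.StopAtStepHook(1000, None)]`, and an empty list when
+ both are `None`.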
+ """ + if steps is not None or max_steps is not None: + if self._train_distribution: + steps_per_run = getattr(self._train_distribution.extended, + 'steps_per_run', 1) + if steps_per_run > 1: + return [ + basic_session_run_hooks._MultiStepStopAtStepHook( # pylint: disable=protected-access + steps, max_steps, steps_per_run) + ] + return [tf.compat.v1.train.StopAtStepHook(steps, max_steps)] + else: + return [] + + def eval_dir(self, name=None): + """Shows the directory name where evaluation metrics are dumped. + + Args: + name: Name of the evaluation if user needs to run multiple evaluations on + different data sets, such as on training data vs test data. Metrics for + different evaluations are saved in separate folders, and appear + separately in tensorboard. + + Returns: + A string which is the path of directory contains evaluation metrics. + """ + return os.path.join(self._model_dir, 'eval' if not name else 'eval_' + name) + + def evaluate(self, + input_fn, + steps=None, + hooks=None, + checkpoint_path=None, + name=None): + """Evaluates the model given evaluation data `input_fn`. + + For each step, calls `input_fn`, which returns one batch of data. + Evaluates until: + - `steps` batches are processed, or + - `input_fn` raises an end-of-input exception (`tf.errors.OutOfRangeError` + or `StopIteration`). + + Args: + input_fn: A function that constructs the input data for evaluation. See + [Premade Estimators]( + https://tensorflow.org/guide/premade_estimators#create_input_functions) + for more information. The function should construct and return one of + the following: + * A `tf.data.Dataset` object: Outputs of `Dataset` object must be a + tuple `(features, labels)` with same constraints as below. + * A tuple `(features, labels)`: Where `features` is a `tf.Tensor` or a + dictionary of string feature name to `Tensor` and `labels` is a + `Tensor` or a dictionary of string label name to `Tensor`. Both + `features` and `labels` are consumed by `model_fn`. They should + satisfy the expectation of `model_fn` from inputs. + steps: Number of steps for which to evaluate model. If `None`, evaluates + until `input_fn` raises an end-of-input exception. + hooks: List of `tf.train.SessionRunHook` subclass instances. Used for + callbacks inside the evaluation call. + checkpoint_path: Path of a specific checkpoint to evaluate. If `None`, the + latest checkpoint in `model_dir` is used. If there are no checkpoints + in `model_dir`, evaluation is run with newly initialized `Variables` + instead of ones restored from checkpoint. + name: Name of the evaluation if user needs to run multiple evaluations on + different data sets, such as on training data vs test data. Metrics for + different evaluations are saved in separate folders, and appear + separately in tensorboard. + + Returns: + A dict containing the evaluation metrics specified in `model_fn` keyed by + name, as well as an entry `global_step` which contains the value of the + global step for which this evaluation was performed. For canned + estimators, the dict contains the `loss` (mean loss per mini-batch) and + the `average_loss` (mean loss per sample). Canned classifiers also return + the `accuracy`. Canned regressors also return the `label/mean` and the + `prediction/mean`. + + Raises: + ValueError: If `steps <= 0`. 
+ """ + _estimator_api_gauge.get_cell('evaluate').set(True) + # pylint: disable=protected-access + if (self._eval_distribution and + hasattr(self._config, '_distribute_coordinator_mode') and + self._config._distribute_coordinator_mode): + return distribute_coordinator_training.estimator_evaluate( + self, + lambda est, s, eval_hooks: est._actual_eval( # pylint: disable=g-long-lambda + input_fn, + strategy=s, + steps=steps, + hooks=eval_hooks, + checkpoint_path=checkpoint_path, + name=name), + hooks) + # pylint: enable=protected-access + else: + return self._actual_eval( + input_fn, + strategy=self._eval_distribution, + steps=steps, + hooks=hooks, + checkpoint_path=checkpoint_path, + name=name) + + def _actual_eval(self, + input_fn, + strategy=None, + steps=None, + hooks=None, + checkpoint_path=None, + name=None): + """The method that does evaluation actually.""" + with context.graph_mode(): + hooks = _check_hooks_type(hooks) + hooks.extend(self._convert_eval_steps_to_hooks(steps)) + + # Check that model has been trained (if nothing has been set explicitly). + if not checkpoint_path: + latest_path = checkpoint_management.latest_checkpoint(self._model_dir) + if not latest_path: + tf.compat.v1.logging.info( + 'Could not find trained model in model_dir: {}, running ' + 'initialization to evaluate.'.format(self._model_dir)) + checkpoint_path = latest_path + + def _evaluate(): + (scaffold, update_op, eval_dict, all_hooks) = ( + self._evaluate_build_graph(input_fn, hooks, checkpoint_path)) + return self._evaluate_run( + checkpoint_path=checkpoint_path, + scaffold=scaffold, + update_op=update_op, + eval_dict=eval_dict, + all_hooks=all_hooks, + output_dir=self.eval_dir(name)) + + with tf.Graph().as_default(): + if strategy: + # We want to create the iterations variable outside the distribution + # scope as that is just stored on the host and mainly used to drive + # the loop and doesn't need to be a Mirrored/Device variable. + training.get_or_create_steps_per_run_variable() + with strategy.scope(): + return _evaluate() + else: + return _evaluate() + + def _convert_eval_steps_to_hooks(self, steps): + """Create hooks to run correct number of steps in evaluation. + + Args: + steps: number of steps to run during evaluation. + + Raises: + ValueError: if steps is less than or equal to zero. + + Returns: + List of hooks to be passed to the estimator. + """ + if steps is None: + return [] + + if steps <= 0: + raise ValueError('Must specify steps > 0, given: {}'.format(steps)) + + # The hooks are declared as private in evaluation.py discourage the use + # by other libraries or open source users. This should be the only usage + # of the estimator evaluation hooks. + if self._eval_distribution: + steps_per_run = getattr(self._eval_distribution.extended, 'steps_per_run', + 1) + if steps_per_run > 1: + return [ + evaluation._MultiStepStopAfterNEvalsHook( # pylint: disable=protected-access + num_evals=steps, + steps_per_run=steps_per_run) + ] + return [evaluation._StopAfterNEvalsHook(num_evals=steps)] # pylint: disable=protected-access + + def predict(self, + input_fn, + predict_keys=None, + hooks=None, + checkpoint_path=None, + yield_single_examples=True): + """Yields predictions for given features. + + Please note that interleaving two predict outputs does not work. See: + [issue/20506]( + https://github.com/tensorflow/tensorflow/issues/20506#issuecomment-422208517) + + Args: + input_fn: A function that constructs the features. 
Prediction continues + until `input_fn` raises an end-of-input exception + (`tf.errors.OutOfRangeError` or `StopIteration`). See [Premade + Estimators]( + https://tensorflow.org/guide/premade_estimators#create_input_functions) + for more information. The function should construct and return one of + the following: + * `tf.data.Dataset` object -- Outputs of `Dataset` object must have + same constraints as below. + * features -- A `tf.Tensor` or a dictionary of string feature name to + `Tensor`. features are consumed by `model_fn`. They should satisfy + the expectation of `model_fn` from inputs. + * A tuple, in which case + the first item is extracted as features. + predict_keys: list of `str`, name of the keys to predict. It is used if + the `tf.estimator.EstimatorSpec.predictions` is a `dict`. If + `predict_keys` is used then rest of the predictions will be filtered + from the dictionary. If `None`, returns all. + hooks: List of `tf.train.SessionRunHook` subclass instances. Used for + callbacks inside the prediction call. + checkpoint_path: Path of a specific checkpoint to predict. If `None`, the + latest checkpoint in `model_dir` is used. If there are no checkpoints + in `model_dir`, prediction is run with newly initialized `Variables` + instead of ones restored from checkpoint. + yield_single_examples: If `False`, yields the whole batch as returned by + the `model_fn` instead of decomposing the batch into individual + elements. This is useful if `model_fn` returns some tensors whose first + dimension is not equal to the batch size. + + Yields: + Evaluated values of `predictions` tensors. + + Raises: + ValueError: If batch length of predictions is not the same and + `yield_single_examples` is `True`. + ValueError: If there is a conflict between `predict_keys` and + `predictions`. For example if `predict_keys` is not `None` but + `tf.estimator.EstimatorSpec.predictions` is not a `dict`. + """ + _estimator_api_gauge.get_cell('predict').set(True) + with context.graph_mode(): + hooks = _check_hooks_type(hooks) + # Check that model has been trained. + if not checkpoint_path: + checkpoint_path = checkpoint_management.latest_checkpoint( + self._model_dir) + if not checkpoint_path: + tf.compat.v1.logging.info( + 'Could not find trained model in model_dir: {}, running ' + 'initialization to predict.'.format(self._model_dir)) + with tf.Graph().as_default() as g: + tf.compat.v1.random.set_random_seed(self._config.tf_random_seed) + self._create_and_assert_global_step(g) + features, input_hooks = self._get_features_from_input_fn( + input_fn, ModeKeys.PREDICT) + estimator_spec = self._call_model_fn(features, None, ModeKeys.PREDICT, + self.config) + + # Call to warm_start has to be after model_fn is called. 
+ self._maybe_warm_start(checkpoint_path) + + predictions = self._extract_keys(estimator_spec.predictions, + predict_keys) + all_hooks = list(input_hooks) + all_hooks.extend(hooks) + all_hooks.extend(list(estimator_spec.prediction_hooks or [])) + with tf.compat.v1.train.MonitoredSession( + session_creator=tf.compat.v1.train.ChiefSessionCreator( + checkpoint_filename_with_path=checkpoint_path, + master=self._config.master, + scaffold=estimator_spec.scaffold, + config=self._session_config), + hooks=all_hooks) as mon_sess: + while not mon_sess.should_stop(): + preds_evaluated = mon_sess.run(predictions) + if not yield_single_examples: + yield preds_evaluated + elif not isinstance(predictions, dict): + for pred in preds_evaluated: + yield pred + else: + for i in range(self._extract_batch_length(preds_evaluated)): + yield { + key: value[i] + for key, value in six.iteritems(preds_evaluated) + } + + def _assert_members_are_not_overridden(self): + """Asserts members of `Estimator` are not overridden.""" + _assert_members_are_not_overridden(Estimator, self) + + def export_saved_model(self, + export_dir_base, + serving_input_receiver_fn, + assets_extra=None, + as_text=False, + checkpoint_path=None, + experimental_mode=ModeKeys.PREDICT): + # pylint: disable=line-too-long + """Exports inference graph as a `SavedModel` into the given dir. + + For a detailed guide on SavedModel, see + [Using the SavedModel format] + (https://tensorflow.org/guide/saved_model#savedmodels_from_estimators). + + This method builds a new graph by first calling the + `serving_input_receiver_fn` to obtain feature `Tensor`s, and then calling + this `Estimator`'s `model_fn` to generate the model graph based on those + features. It restores the given checkpoint (or, lacking that, the most + recent checkpoint) into this graph in a fresh session. Finally it creates + a timestamped export directory below the given `export_dir_base`, and writes + a `SavedModel` into it containing a single `tf.MetaGraphDef` saved from this + session. + + The exported `MetaGraphDef` will provide one `SignatureDef` for each + element of the `export_outputs` dict returned from the `model_fn`, named + using the same keys. One of these keys is always + `tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY`, + indicating which signature will be served when a serving request does not + specify one. For each signature, the outputs are provided by the + corresponding `tf.estimator.export.ExportOutput`s, and the inputs are always + the input receivers provided by the `serving_input_receiver_fn`. + + Extra assets may be written into the `SavedModel` via the `assets_extra` + argument. This should be a dict, where each key gives a destination path + (including the filename) relative to the assets.extra directory. The + corresponding value gives the full path of the source file to be copied. + For example, the simple case of copying a single file without renaming it + is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`. + + The experimental_mode parameter can be used to export a single + train/eval/predict graph as a `SavedModel`. + See `experimental_export_all_saved_models` for full docs. + + Args: + export_dir_base: A string containing a directory in which to create + timestamped subdirectories containing exported `SavedModel`s. + serving_input_receiver_fn: A function that takes no argument and returns a + `tf.estimator.export.ServingInputReceiver` or + `tf.estimator.export.TensorServingInputReceiver`. 
+ assets_extra: A dict specifying how to populate the assets.extra directory + within the exported `SavedModel`, or `None` if no extra assets are + needed. + as_text: whether to write the `SavedModel` proto in text format. + checkpoint_path: The checkpoint path to export. If `None` (the default), + the most recent checkpoint found within the model directory is chosen. + experimental_mode: `tf.estimator.ModeKeys` value indicating with mode will + be exported. Note that this feature is experimental. + + Returns: + The path to the exported directory as a bytes object. + + Raises: + ValueError: if no `serving_input_receiver_fn` is provided, no + `export_outputs` are provided, or no checkpoint can be found. + """ + # pylint: enable=line-too-long + if not serving_input_receiver_fn: + raise ValueError('An input_receiver_fn must be defined.') + + input_receiver_fn_map = {experimental_mode: serving_input_receiver_fn} + + return self._export_all_saved_models( + export_dir_base, + input_receiver_fn_map, + assets_extra=assets_extra, + as_text=as_text, + checkpoint_path=checkpoint_path, + strip_default_attrs=True) + + def experimental_export_all_saved_models(self, + export_dir_base, + input_receiver_fn_map, + assets_extra=None, + as_text=False, + checkpoint_path=None): + """Exports a `SavedModel` with `tf.MetaGraphDefs` for each requested mode. + + For each mode passed in via the `input_receiver_fn_map`, + this method builds a new graph by calling the `input_receiver_fn` to obtain + feature and label `Tensor`s. Next, this method calls the `Estimator`'s + `model_fn` in the passed mode to generate the model graph based on + those features and labels, and restores the given checkpoint + (or, lacking that, the most recent checkpoint) into the graph. + Only one of the modes is used for saving variables to the `SavedModel` + (order of preference: `tf.estimator.ModeKeys.TRAIN`, + `tf.estimator.ModeKeys.EVAL`, then + `tf.estimator.ModeKeys.PREDICT`), such that up to three + `tf.MetaGraphDefs` are saved with a single set of variables in a single + `SavedModel` directory. + + For the variables and `tf.MetaGraphDefs`, a timestamped export directory + below `export_dir_base`, and writes a `SavedModel` into it containing the + `tf.MetaGraphDef` for the given mode and its associated signatures. + + For prediction, the exported `MetaGraphDef` will provide one `SignatureDef` + for each element of the `export_outputs` dict returned from the `model_fn`, + named using the same keys. One of these keys is always + `tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY`, + indicating which signature will be served when a serving request does not + specify one. For each signature, the outputs are provided by the + corresponding `tf.estimator.export.ExportOutput`s, and the inputs are always + the input receivers provided by the `serving_input_receiver_fn`. + + For training and evaluation, the `train_op` is stored in an extra + collection, and loss, metrics, and predictions are included in a + `SignatureDef` for the mode in question. + + Extra assets may be written into the `SavedModel` via the `assets_extra` + argument. This should be a dict, where each key gives a destination path + (including the filename) relative to the assets.extra directory. The + corresponding value gives the full path of the source file to be copied. + For example, the simple case of copying a single file without renaming it + is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`. 
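+
+ For illustration, an assumed `input_receiver_fn_map` with placeholder
+ receiver functions might look like:
+
+   input_receiver_fn_map = {
+       tf.estimator.ModeKeys.PREDICT: serving_input_receiver_fn,
+       tf.estimator.ModeKeys.EVAL: eval_input_receiver_fn,
+   }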
+ + Args: + export_dir_base: A string containing a directory in which to create + timestamped subdirectories containing exported `SavedModel`s. + input_receiver_fn_map: dict of `tf.estimator.ModeKeys` to + `input_receiver_fn` mappings, where the `input_receiver_fn` is a + function that takes no arguments and returns the appropriate subclass of + `InputReceiver`. + assets_extra: A dict specifying how to populate the assets.extra directory + within the exported `SavedModel`, or `None` if no extra assets are + needed. + as_text: whether to write the `SavedModel` proto in text format. + checkpoint_path: The checkpoint path to export. If `None` (the default), + the most recent checkpoint found within the model directory is chosen. + + Returns: + The path to the exported directory as a bytes object. + + Raises: + ValueError: if any `input_receiver_fn` is `None`, no `export_outputs` + are provided, or no checkpoint can be found. + """ + return self._export_all_saved_models( + export_dir_base, + input_receiver_fn_map, + assets_extra=assets_extra, + as_text=as_text, + checkpoint_path=checkpoint_path, + strip_default_attrs=True) + + def _export_all_saved_models(self, + export_dir_base, + input_receiver_fn_map, + assets_extra=None, + as_text=False, + checkpoint_path=None, + strip_default_attrs=True): + """Exports multiple modes in the model function to a SavedModel.""" + # TODO(b/65561022): Consider allowing multiple input_receiver_fns per mode. + with context.graph_mode(): + if not checkpoint_path: + # Locate the latest checkpoint + checkpoint_path = self.latest_checkpoint() + if not checkpoint_path: + if self._warm_start_settings: + checkpoint_path = self._warm_start_settings.ckpt_to_initialize_from + if tf.compat.v1.gfile.IsDirectory(checkpoint_path): + checkpoint_path = tf.train.latest_checkpoint(checkpoint_path) + else: + raise ValueError("Couldn't find trained model at {}.".format( + self._model_dir)) + + export_dir = export_lib.get_timestamped_export_dir(export_dir_base) + temp_export_dir = export_lib.get_temp_export_dir(export_dir) + + builder = tf.compat.v1.saved_model.Builder(temp_export_dir) + + save_variables = True + # Note that the order in which we run here matters, as the first + # mode we pass through will be used to save the variables. We run TRAIN + # first, as that is also the mode used for checkpoints, and therefore + # we are not likely to have vars in PREDICT that are not in the checkpoint + # created by TRAIN. + if input_receiver_fn_map.get(ModeKeys.TRAIN): + self._add_meta_graph_for_mode( + builder, + input_receiver_fn_map, + checkpoint_path, + save_variables, + mode=ModeKeys.TRAIN, + strip_default_attrs=strip_default_attrs) + save_variables = False + if input_receiver_fn_map.get(ModeKeys.EVAL): + self._add_meta_graph_for_mode( + builder, + input_receiver_fn_map, + checkpoint_path, + save_variables, + mode=ModeKeys.EVAL, + strip_default_attrs=strip_default_attrs) + save_variables = False + if input_receiver_fn_map.get(ModeKeys.PREDICT): + self._add_meta_graph_for_mode( + builder, + input_receiver_fn_map, + checkpoint_path, + save_variables, + mode=ModeKeys.PREDICT, + strip_default_attrs=strip_default_attrs) + save_variables = False + + if save_variables: + raise ValueError('No valid modes for exporting found. 
Got {}.'.format( + input_receiver_fn_map.keys())) + + builder.save(as_text) + + # Add the extra assets + if assets_extra: + assets_extra_path = os.path.join( + tf.compat.as_bytes(temp_export_dir), + tf.compat.as_bytes('assets.extra')) + for dest_relative, source in assets_extra.items(): + dest_absolute = os.path.join( + tf.compat.as_bytes(assets_extra_path), + tf.compat.as_bytes(dest_relative)) + dest_path = os.path.dirname(dest_absolute) + tf.compat.v1.gfile.MakeDirs(dest_path) + tf.compat.v1.gfile.Copy(source, dest_absolute) + + tf.compat.v1.gfile.Rename(temp_export_dir, export_dir) + return export_dir + + def _add_meta_graph_for_mode(self, + builder, + input_receiver_fn_map, + checkpoint_path, + save_variables=True, + mode=ModeKeys.PREDICT, + export_tags=None, + check_variables=True, + strip_default_attrs=True): + """Loads variables and adds them along with a `tf.MetaGraphDef` for saving. + + Args: + builder: instance of `tf.saved_modle.builder.SavedModelBuilder` that will + be used for saving. + input_receiver_fn_map: dict of `tf.estimator.ModeKeys` to + `input_receiver_fn` mappings, where the `input_receiver_fn` is a + function that takes no argument and returns the appropriate subclass of + `InputReceiver`. + checkpoint_path: The checkpoint path to export. + save_variables: bool, whether variables should be saved. If `False`, just + the `tf.MetaGraphDef` will be saved. Note that `save_variables` should + only be `True` for the first call to this function, and the + `SavedModelBuilder` will raise an error if that is not the case. + mode: `tf.estimator.ModeKeys` value indicating which mode will be + exported. + export_tags: The set of tags with which to save `tf.MetaGraphDef`. If + `None`, a default set will be selected to matched the passed mode. + check_variables: bool, whether to check the checkpoint has all variables. + strip_default_attrs: bool, whether to strip default attributes. This may + only be True when called from the deprecated V1 + Estimator.export_savedmodel. + + Raises: + ValueError: if `save_variables` is `True` and `check_variable` is `False`. + """ + if export_tags is None: + export_tags = export_lib.EXPORT_TAG_MAP[mode] + input_receiver_fn = input_receiver_fn_map[mode] + + with tf.Graph().as_default() as g: + self._create_and_assert_global_step(g) + tf.compat.v1.random.set_random_seed(self._config.tf_random_seed) + + input_receiver = input_receiver_fn() + + # Call the model_fn and collect the export_outputs. + estimator_spec = self._call_model_fn( + features=input_receiver.features, + labels=getattr(input_receiver, 'labels', None), + mode=mode, + config=self.config) + + export_outputs = export_lib.export_outputs_for_mode( + mode=estimator_spec.mode, + serving_export_outputs=estimator_spec.export_outputs, + predictions=estimator_spec.predictions, + loss=estimator_spec.loss, + metrics=estimator_spec.eval_metric_ops) + + # Build the SignatureDefs from receivers and all outputs + signature_def_map = export_lib.build_all_signature_defs( + input_receiver.receiver_tensors, + export_outputs, + getattr(input_receiver, 'receiver_tensors_alternatives', None), + serving_only=(mode == ModeKeys.PREDICT)) + + with tf.compat.v1.Session(config=self._session_config) as session: + + if estimator_spec.scaffold.local_init_op is not None: + local_init_op = estimator_spec.scaffold.local_init_op + else: + local_init_op = tf.compat.v1.train.Scaffold.default_local_init_op() + + # This saver will be used both for restoring variables now, + # and in saving out the metagraph below. 
This ensures that any + # Custom Savers stored with the Scaffold are passed through to the + # SavedModel for restore later. + if isinstance(estimator_spec.scaffold.saver, trackable_util.Checkpoint): + graph_saver = tf.compat.v1.train.Saver( + var_list=graph_view.ObjectGraphView( + estimator_spec.scaffold.saver).frozen_saveable_objects(), + sharded=True) + else: + graph_saver = ( + estimator_spec.scaffold.saver or + tf.compat.v1.train.Saver(sharded=True)) + + if save_variables and not check_variables: + raise ValueError('If `save_variables` is `True, `check_variables`' + 'must not be `False`.') + if check_variables: + try: + graph_saver.restore(session, checkpoint_path) + except tf.errors.NotFoundError as e: + msg = ('Could not load all requested variables from checkpoint. ' + 'Please make sure your model_fn does not expect variables ' + 'that were not saved in the checkpoint.\n\n' + 'Encountered error with mode `{}` while restoring ' + 'checkpoint from: `{}`. Full Traceback:\n\n{}').format( + mode, checkpoint_path, e) + raise ValueError(msg) + + # We add the train op explicitly for now, so that we don't have to + # change the Builder public interface. Note that this is a no-op + # for prediction, where train_op is None. + builder._add_train_op(estimator_spec.train_op) # pylint: disable=protected-access + + meta_graph_kwargs = dict( + tags=export_tags, + signature_def_map=signature_def_map, + assets_collection=tf.compat.v1.get_collection( + tf.compat.v1.GraphKeys.ASSET_FILEPATHS), + main_op=local_init_op, + saver=graph_saver, + strip_default_attrs=strip_default_attrs) + + if save_variables: + builder.add_meta_graph_and_variables(session, **meta_graph_kwargs) + else: + builder.add_meta_graph(**meta_graph_kwargs) + + def _get_features_from_input_fn(self, input_fn, mode): + """Extracts the `features` from return values of `input_fn`.""" + result = self._call_input_fn(input_fn, mode) + result, _, hooks = estimator_util.parse_input_fn_result(result) + self._validate_features_in_predict_input(result) + return result, hooks + + def _validate_features_in_predict_input(self, result): + if not _has_dataset_or_queue_runner(result): + logging.warning('Input graph does not use tf.data.Dataset or contain a ' + 'QueueRunner. That means predict yields forever. ' + 'This is probably a mistake.') + + def _get_iterator_from_input_fn(self, input_fn, mode, distribution=None): + """Calls `input_fn` and returns an iterator.""" + if distribution is not None: + # pylint: disable=g-long-lambda + iterator = distribution.make_input_fn_iterator( + lambda input_context: self._call_input_fn(input_fn, mode, + input_context)) + input_hooks = [ + estimator_util.DistributedIteratorInitializerHook(iterator) + ] + else: + result = self._call_input_fn(input_fn, mode) + iterator = result.make_initializable_iterator() + input_hooks = [estimator_util._DatasetInitializerHook(iterator)] # pylint: disable=protected-access + return iterator, input_hooks + + def _get_features_and_labels_from_input_fn(self, input_fn, mode): + """Extracts the `features` and labels from return values of `input_fn`.""" + return estimator_util.parse_input_fn_result( + self._call_input_fn(input_fn, mode)) + + def _extract_batch_length(self, preds_evaluated): + """Extracts batch length of predictions.""" + batch_length = None + for key, value in six.iteritems(preds_evaluated): + batch_length = batch_length or value.shape[0] + if value.shape[0] != batch_length: + raise ValueError('Batch length of predictions should be same. 
%s has ' + 'different batch length than others.' % key) + return batch_length + + def _extract_keys(self, predictions, predict_keys): + """Extracts `predict_keys` from `predictions`.""" + if not predict_keys: + return predictions + if not isinstance(predictions, dict): + raise ValueError( + 'predict_keys argument is not valid in case of non-dict predictions.') + existing_keys = predictions.keys() + predictions = { + key: value + for key, value in six.iteritems(predictions) + if key in predict_keys + } + if not predictions: + raise ValueError('Expected to run at least one output from %s, ' + 'provided %s.' % (existing_keys, predict_keys)) + return predictions + + def _create_global_step(self, graph): + """Creates the global step tensor in graph. + + The global step tensor must be an integer type with name 'global_step' and + be added to the collection `tf.GraphKeys.GLOBAL_STEP`. + + Args: + graph: The graph in which to create the global step tensor. + + Returns: + The global step `tf.Tensor`. + """ + return tf.compat.v1.train.create_global_step(graph) + + def _create_and_assert_global_step(self, graph): + """Creates and asserts properties of the global step. + + Args: + graph: The graph in which to create the global step tensor. + + Returns: + The global step `tf.Tensor`. + """ + step = self._create_global_step(graph) + assert step is tf.compat.v1.train.get_global_step() + assert step.dtype.is_integer + return step + + def _call_input_fn(self, input_fn, mode, input_context=None): + """Calls the input function. + + Args: + input_fn: The input function. + mode: `tf.estimator.ModeKeys` + + Returns: + The return value of the passed `input_fn`, which should be one of: + + * A 'tf.data.Dataset' object: Outputs of `Dataset` object must be a + tuple `(features, labels)` with same constraints as below. + * A tuple `(features, labels)`: Where `features` is a `Tensor` or a + dictionary of string feature name to `Tensor` and `labels` is a + `Tensor` or a dictionary of string label name to `Tensor`. Both + `features` and `labels` are consumed by `model_fn`. They should + satisfy the expectation of `model_fn` from inputs. + + Raises: + ValueError: if `input_fn` takes invalid arguments. + """ + input_fn_args = function_utils.fn_args(input_fn) + kwargs = {} + if 'mode' in input_fn_args: + kwargs['mode'] = mode + if 'params' in input_fn_args: + kwargs['params'] = self.params + if 'config' in input_fn_args: + kwargs['config'] = self.config + if input_context and 'input_context' in input_fn_args: + tf.compat.v1.logging.info( + 'The `input_fn` accepts an `input_context` which will ' + 'be given by DistributionStrategy') + kwargs['input_context'] = input_context + with tf.compat.v1.device('/cpu:0'): + return input_fn(**kwargs) + + def _call_model_fn(self, features, labels, mode, config): + """Calls model function. + + Args: + features: features dict. + labels: labels dict. + mode: `tf.estimator.ModeKeys` + config: `tf.estimator.RunConfig` + + Returns: + An `tf.estimator.EstimatorSpec` object. + + Raises: + ValueError: if `model_fn` returns invalid objects. 
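+
+ For illustration, a minimal `model_fn` compatible with this call (a sketch;
+ the loss and train op are placeholders, not a real model):
+
+   def my_model_fn(features, labels, mode, params, config):
+     loss = ...        # build the model from `features` and `labels`
+     train_op = ...    # e.g. an optimizer minimize op
+     return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op)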
+ """ + model_fn_args = function_utils.fn_args(self._model_fn) + kwargs = {} + if 'labels' in model_fn_args: + kwargs['labels'] = labels + else: + if labels is not None: + raise ValueError( + 'model_fn does not take labels, but input_fn returns labels.') + if 'mode' in model_fn_args: + kwargs['mode'] = mode + if 'params' in model_fn_args: + kwargs['params'] = self.params + if 'config' in model_fn_args: + kwargs['config'] = config + + logging.info('Calling model_fn.') + model_fn_results = self._model_fn(features=features, **kwargs) + logging.info('Done calling model_fn.') + + if not isinstance(model_fn_results, model_fn_lib.EstimatorSpec): + raise ValueError('model_fn should return an EstimatorSpec.') + + return model_fn_results + + def _train_model(self, input_fn, hooks, saving_listeners): + if self._train_distribution: + return self._train_model_distributed(input_fn, hooks, saving_listeners) + else: + return self._train_model_default(input_fn, hooks, saving_listeners) + + def _train_model_default(self, input_fn, hooks, saving_listeners): + """Initiate training with `input_fn`, without `DistributionStrategies`. + + Args: + input_fn: A function that provides input data for training as minibatches. + hooks: List of `tf.train.SessionRunHook` subclass instances. Used for + callbacks inside the training loop. + saving_listeners: list of `tf.train.CheckpointSaverListener` objects. Used + for callbacks that run immediately before or after checkpoint savings. + + Returns: + Loss from training + """ + worker_hooks = [] + with tf.Graph().as_default() as g, g.device(self._device_fn): + tf.compat.v1.random.set_random_seed(self._config.tf_random_seed) + global_step_tensor = self._create_and_assert_global_step(g) + + # Skip creating a read variable if _create_and_assert_global_step + # returns None (e.g. tf.contrib.estimator.SavedModelEstimator). + if global_step_tensor is not None: + training_util._get_or_create_global_step_read(g) # pylint: disable=protected-access + + features, labels, input_hooks = ( + self._get_features_and_labels_from_input_fn(input_fn, ModeKeys.TRAIN)) + worker_hooks.extend(input_hooks) + estimator_spec = self._call_model_fn(features, labels, ModeKeys.TRAIN, + self.config) + global_step_tensor = tf.compat.v1.train.get_global_step(g) + return self._train_with_estimator_spec(estimator_spec, worker_hooks, + hooks, global_step_tensor, + saving_listeners) + + def _train_model_distributed(self, input_fn, hooks, saving_listeners): + """Initiate training with `input_fn`, using `DistributionStrategies`. + + Args: + input_fn: A function that provides input data for training as minibatches. + hooks: List of `tf.train.SessionRunHook` subclass instances. Used for + callbacks inside the training loop. + saving_listeners: list of `tf.train.CheckpointSaverListener` objects. Used + for callbacks that run immediately before or after checkpoint savings. 
+ + Returns: + Loss from training + """ + # pylint: disable=protected-access + if (hasattr(self._config, '_distribute_coordinator_mode') and + self._config._distribute_coordinator_mode): # pylint: disable=protected-access + distribute_coordinator_training.estimator_train( + self, + lambda est, s, train_hooks: est._actual_train_model_distributed( # pylint: disable=g-long-lambda + s, input_fn, train_hooks, saving_listeners), + hooks) + return self + else: + self._config._train_distribute.configure(self._config.session_config) + return self._actual_train_model_distributed( + self._config._train_distribute, input_fn, hooks, saving_listeners) + # pylint: enable=protected-access + + def _actual_train_model_distributed(self, strategy, input_fn, hooks, + saving_listeners): + """That method that does actual training with distribution strategy.""" + # TODO(sourabhbajaj): Remove this hack once we migrate the other strategies + # to use the new API + is_tpu_strategy = strategy.__class__.__name__.startswith('TPUStrategy') + + worker_hooks = [] + with tf.Graph().as_default() as g: + # We want to create the iterations variable outside the distribution scope + # as that is just stored on the host and mainly used to drive the loop + # and doesn't need to be a Mirrored/Device variable. + if is_tpu_strategy: + steps_per_run_variable = training.get_or_create_steps_per_run_variable() + + # Set flag on the distribution strategy so that optimizer v1 is + # distribution aware and scales the losses by number of replicas. + # This is required only for backward compatibility with estimator and + # V1 optimizer. TF2 will not do this scaling. + if hasattr(strategy, '_scale_loss_for_estimator_enabled'): + scale_ctx = strategy._scale_loss_for_estimator_enabled() # pylint: disable=protected-access + else: + # TODO(psv): Remove this clause after estimator repo gets the + # distribute library changes related to loss scaling. + @tf_contextlib.contextmanager + def nullcontextmanager(): + yield + + scale_ctx = nullcontextmanager() + + with strategy.scope(), scale_ctx: + tf.compat.v1.random.set_random_seed(self._config.tf_random_seed) + iterator, input_hooks = self._get_iterator_from_input_fn( + input_fn, ModeKeys.TRAIN, strategy) + worker_hooks.extend(input_hooks) + global_step_tensor = self._create_and_assert_global_step(g) + # we want to add to the global collection in the main thread not the + # replica threads. 
+ tf.compat.v1.add_to_collection( + training_util.GLOBAL_STEP_READ_KEY, + strategy.extended.read_var(global_step_tensor)) + + if is_tpu_strategy: + # Create a step_fn from the train_op of grouped_estimator_spec + def step_fn(ctx, inputs): + """A single step that is passed to run_on_dataset.""" + if isinstance(inputs, tuple): + features, labels = inputs + else: + features = inputs + labels = None + estimator_spec = strategy.extended.call_for_each_replica( + self._call_model_fn, + args=(features, labels, ModeKeys.TRAIN, self.config)) + ctx.set_last_step_output( + name='loss', + output=estimator_spec.loss, + reduce_op=_get_loss_reduce_op_for_reporting()) + ctx.set_non_tensor_output( + name='estimator_spec', output=estimator_spec) + return estimator_spec.train_op + + # Create new train_op post graph rewrites + initial_training_loss = tf.constant(1e7) + ctx = strategy.extended.experimental_run_steps_on_iterator( + step_fn, + iterator, + iterations=steps_per_run_variable, + initial_loop_values={'loss': initial_training_loss}) + distributed_train_op = ctx.run_op + loss = ctx.last_step_outputs['loss'] + grouped_estimator_spec = ctx.non_tensor_outputs['estimator_spec'] + else: + features, labels = estimator_util.parse_iterator_result( + iterator.get_next()) + grouped_estimator_spec = strategy.extended.call_for_each_replica( + self._call_model_fn, + args=( + features, + labels, # although this will be None it seems + ModeKeys.TRAIN, + self.config)) + loss = strategy.reduce( + _get_loss_reduce_op_for_reporting(), + grouped_estimator_spec.loss, + axis=None) + distributed_train_op = grouped_estimator_spec.train_op + + scaffold = _combine_distributed_scaffold( + grouped_estimator_spec.scaffold, strategy) + + # TODO(yuefengz): add a test for unwrapping per_device_hooks. 
+ def get_hooks_from_the_first_device(per_device_hooks): + return [ + self._train_distribution.experimental_local_results( + per_device_hook)[0] for per_device_hook in per_device_hooks + ] + + training_hooks = get_hooks_from_the_first_device( + grouped_estimator_spec.training_hooks) + training_chief_hooks = get_hooks_from_the_first_device( + grouped_estimator_spec.training_chief_hooks) + estimator_spec = model_fn_lib.EstimatorSpec( + mode=grouped_estimator_spec.mode, + loss=loss, + train_op=strategy.group(distributed_train_op), + training_hooks=training_hooks, + training_chief_hooks=training_chief_hooks, + scaffold=scaffold) + return self._train_with_estimator_spec(estimator_spec, worker_hooks, + hooks, global_step_tensor, + saving_listeners) + + def _train_with_estimator_spec_distributed(self, estimator_spec, worker_hooks, + saving_listener): + """Train a model with the given Estimator Spec and Distribution Strategy.""" + if saving_listener: + raise ValueError('Saving listener is not supported by the current ' + 'Distribution Strategies.') + # TODO: consolidate code duplication in _train_with_estimator_spec + with training.MonitoredTrainingSession( + master=self._config.master, + is_chief=self._config.is_chief, + checkpoint_dir=self._model_dir, + scaffold=estimator_spec.scaffold, + hooks=worker_hooks, + chief_only_hooks=tuple(estimator_spec.training_chief_hooks), + save_checkpoint_secs=self._config.save_checkpoints_secs, + save_checkpoint_steps=self._config.save_checkpoints_steps, + save_summaries_steps=self._config.save_summary_steps, + config=self._session_config, + max_wait_secs=self._config.session_creation_timeout_secs, + log_step_count_steps=self._config.log_step_count_steps, + save_graph_def=self._config.checkpoint_save_graph_def) as mon_sess: + loss = None + current_step = 0 + while not mon_sess.should_stop(): + current_step += 1 + # just as keras(https://github.com/tensorflow/tensorflow/blob/v2.4.1/tensorflow/python/keras/engine/training.py#L1093), + # trace should be enabled for every step + with trace.Trace('train', step_num=current_step, _r=1): + _, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss]) + if current_step == 0: + tf.compat.v1.logging.warn('Training with estimator made no steps. ' + 'Perhaps input is empty or misspecified.') + return loss + + def _train_with_estimator_spec(self, estimator_spec, worker_hooks, hooks, + global_step_tensor, saving_listeners): + """Train a model with the given Estimator Spec.""" + if (self._warm_start_settings and + not tf.train.latest_checkpoint(self._model_dir)): + tf.compat.v1.logging.info('Warm-starting with WarmStartSettings: %s' % + (self._warm_start_settings,)) + tf.compat.v1.train.warm_start(*self._warm_start_settings) + # Check if the user created a loss summary, and add one if they didn't. + # We assume here that the summary is called 'loss'. If it is not, we will + # make another one with the name 'loss' to ensure it shows up in the right + # graph in TensorBoard.
+ if not any([ + x.op.name == 'loss' for x in ops.get_collection(ops.GraphKeys.SUMMARIES) + ]): + summary.scalar('loss', estimator_spec.loss) + ops.add_to_collection(ops.GraphKeys.LOSSES, estimator_spec.loss) + worker_hooks.extend(hooks) + worker_hooks.append(tf.compat.v1.train.NanTensorHook(estimator_spec.loss)) + if self._config.log_step_count_steps is not None: + worker_hooks.append( + tf.compat.v1.train.LoggingTensorHook( + { + 'loss': estimator_spec.loss, + 'step': global_step_tensor + }, + every_n_iter=self._config.log_step_count_steps)) + worker_hooks.extend(estimator_spec.training_hooks) + + if not (estimator_spec.scaffold.saver or + tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.SAVERS)): + tf.compat.v1.add_to_collection( + tf.compat.v1.GraphKeys.SAVERS, + tf.compat.v1.train.Saver( + sharded=True, + max_to_keep=self._config.keep_checkpoint_max, + keep_checkpoint_every_n_hours=( + self._config.keep_checkpoint_every_n_hours), + defer_build=True, + save_relative_paths=True)) + + if (self._config.cluster_spec and type( + self._train_distribution).__name__ in ('CollectiveAllReduceStrategy', + 'CollectiveAllReduceStrategyV1', + 'MultiWorkerMirroredStrategy')): + return self._train_with_estimator_spec_distributed( + estimator_spec, worker_hooks, saving_listeners) + + chief_hooks = [] + all_hooks = worker_hooks + list(estimator_spec.training_chief_hooks) + saver_hooks = [ + h for h in all_hooks + if isinstance(h, tf.compat.v1.train.CheckpointSaverHook) + ] + if (self._config.save_checkpoints_secs or + self._config.save_checkpoints_steps): + if not saver_hooks: + chief_hooks = [ + tf.compat.v1.train.CheckpointSaverHook( + self._model_dir, + save_secs=self._config.save_checkpoints_secs, + save_steps=self._config.save_checkpoints_steps, + scaffold=estimator_spec.scaffold, + save_graph_def=self._config.checkpoint_save_graph_def) + ] + saver_hooks = [chief_hooks[0]] + if saving_listeners: + if not saver_hooks: + raise ValueError( + 'There should be a CheckpointSaverHook to use saving_listeners. ' + 'Please set one of the RunConfig.save_checkpoints_steps or ' + 'RunConfig.save_checkpoints_secs.') + else: + # It is expected to have one CheckpointSaverHook. If multiple, we pick + # up the first one to add listener. + for listener in saving_listeners: + # pylint: disable=protected-access + if listener not in saver_hooks[0]._listeners: + saver_hooks[0]._listeners.append(listener) + # pylint: disable=protected-access + + # Add summary hooks to worker 0 if we are running with a master, to ensure + # that summaries are written at correct intervals even with long-running + # evaluations. + save_summary_steps = self._config.save_summary_steps + log_step_count_steps = self._config.log_step_count_steps + + # Check existence of appropriate cluster spec fields, as well as master and + # worker nodes. As master also performs evaluation, summary writing must + # occur on a different node. The presence of a worker is also checked to + # prevent reassigning hooks for single-replica jobs with just a master node. + if (self._config.cluster_spec and self._config.cluster_spec.jobs and + (run_config.TaskType.WORKER in self._config.cluster_spec.jobs) and + (run_config.TaskType.MASTER in self._config.cluster_spec.jobs)): + # Update config values to prevent the default hooks from being created on + # the master or other workers. 
+ save_summary_steps = 0 + log_step_count_steps = None + + if (self._config.task_type == run_config.TaskType.WORKER and + self._config.task_id == 0): + if (self._config.save_summary_steps and + self._config.save_summary_steps > 0): + worker_hooks.append( + tf.compat.v1.train.SummarySaverHook( + save_steps=self._config.save_summary_steps, + output_dir=self._config.model_dir, + scaffold=estimator_spec.scaffold)) + + if (self._config.log_step_count_steps and + self._config.log_step_count_steps > 0): + worker_hooks.append( + tf.compat.v1.train.StepCounterHook( + every_n_steps=self._config.log_step_count_steps, + output_dir=self._config.model_dir)) + + with training.MonitoredTrainingSession( + master=self._config.master, + is_chief=self._config.is_chief, + checkpoint_dir=self._model_dir, + scaffold=estimator_spec.scaffold, + hooks=worker_hooks, + chief_only_hooks=(tuple(chief_hooks) + + tuple(estimator_spec.training_chief_hooks)), + save_checkpoint_secs=0, # Saving is handled by a hook. + save_summaries_steps=save_summary_steps, + config=self._session_config, + max_wait_secs=self._config.session_creation_timeout_secs, + log_step_count_steps=log_step_count_steps, + save_graph_def=self._config.checkpoint_save_graph_def) as mon_sess: + loss = None + current_step = 0 + + if carbon_tracker is not None: + # -------- CT ------- # + print('x ------------------------------------------------- x') + steps_per_epoch = int(training_steps / training_epochs) + print("epochs", training_epochs) + print('steps_per_epoch', steps_per_epoch) + print('x ------------------------------------------------- x') + # ------------------- # + + while not mon_sess.should_stop(): + # -------- CT ------- # + if current_step % steps_per_epoch == 0: + carbon_tracker.epoch_start() + # ------------------- # + + current_step += 1 + # just as keras(https://github.com/tensorflow/tensorflow/blob/v2.4.1/tensorflow/python/keras/engine/training.py#L1093), + # trace should be enabled for every step + with trace.Trace('train', step_num=current_step, _r=1): + _, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss]) + + # -------- CT ------- # + if current_step % steps_per_epoch == 0: + carbon_tracker.epoch_end() + # ------------------- # + + if current_step == 0: + tf.compat.v1.logging.warn('Training with estimator made no steps. ' + 'Perhaps input is empty or misspecified.') + + # -------- CT ------- # + carbon_tracker.stop() + # ------------------- # + + else: + while not mon_sess.should_stop(): + current_step += 1 + # just as keras(https://github.com/tensorflow/tensorflow/blob/v2.4.1/tensorflow/python/keras/engine/training.py#L1093), + # trace should be enabled for every step + with trace.Trace('train', step_num=current_step, _r=1): + _, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss]) + + if current_step == 0: + tf.compat.v1.logging.warn('Training with estimator made no steps. 
' + 'Perhaps input is empty or misspecified.') + + return loss + + def _evaluate_build_graph(self, input_fn, hooks=None, checkpoint_path=None): + """Builds the graph and related hooks to run evaluation.""" + tf.compat.v1.random.set_random_seed(self._config.tf_random_seed) + self._create_and_assert_global_step(tf.compat.v1.get_default_graph()) + + if self._eval_distribution: + (scaffold, evaluation_hooks, input_hooks, update_op, eval_dict) = ( + self._call_model_fn_eval_distributed(input_fn, self.config)) + else: + (scaffold, evaluation_hooks, input_hooks, update_op, eval_dict) = ( + self._call_model_fn_eval(input_fn, self.config)) + + global_step_tensor = tf.compat.v1.train.get_global_step( + tf.compat.v1.get_default_graph()) + # Call to warm_start has to be after model_fn is called. + self._maybe_warm_start(checkpoint_path) + + if tf.compat.v1.GraphKeys.GLOBAL_STEP in eval_dict: + raise ValueError( + 'Metric with name `global_step` is not allowed, because Estimator ' + 'already defines a default metric with the same name.') + eval_dict[tf.compat.v1.GraphKeys.GLOBAL_STEP] = global_step_tensor + + all_hooks = list(input_hooks) + all_hooks.extend(hooks) + all_hooks.extend(list(evaluation_hooks or [])) + # New local variables have been added, so update the estimator spec's + # local init op if it was defined. + if scaffold and scaffold.local_init_op: + # Ensure that eval step has been created before updating local init op. + evaluation._get_or_create_eval_step() # pylint: disable=protected-access + + scaffold = tf.compat.v1.train.Scaffold( + local_init_op=tf.group( + scaffold.local_init_op, + tf.compat.v1.train.Scaffold.default_local_init_op()), + copy_from_scaffold=scaffold) + + return scaffold, update_op, eval_dict, all_hooks + + def _call_model_fn_eval(self, input_fn, config): + """Call model_fn for evaluation and handle return values.""" + features, labels, input_hooks = self._get_features_and_labels_from_input_fn( + input_fn, ModeKeys.EVAL) + + estimator_spec = self._call_model_fn(features, labels, ModeKeys.EVAL, + config) + eval_metric_ops = _verify_and_create_loss_metric( + estimator_spec.eval_metric_ops, estimator_spec.loss) + update_op, eval_dict = _extract_metric_update_ops(eval_metric_ops) + return (estimator_spec.scaffold, estimator_spec.evaluation_hooks, + input_hooks, update_op, eval_dict) + + def _call_model_fn_eval_distributed(self, input_fn, config): + """Call model_fn in distribution mode and handle return values.""" + + iterator, input_hooks = self._get_iterator_from_input_fn( + input_fn, ModeKeys.EVAL, self._eval_distribution) + + is_tpu_strategy = ( + self._eval_distribution.__class__.__name__.startswith('TPUStrategy')) + + if is_tpu_strategy: + steps_per_run_variable = training.get_or_create_steps_per_run_variable() + + def step_fn(ctx, inputs): + """Runs one step of the eval computation and captures outputs.""" + if isinstance(inputs, tuple): + features, labels = inputs + else: + features = inputs + labels = None + estimator_spec = self._eval_distribution.extended.call_for_each_replica( + self._call_model_fn, args=(features, labels, ModeKeys.EVAL, config)) + eval_metric_ops = _verify_and_create_loss_metric( + estimator_spec.eval_metric_ops, estimator_spec.loss, + self._eval_distribution) + update_op, eval_dict = _extract_metric_update_ops( + eval_metric_ops, self._eval_distribution) + ctx.set_non_tensor_output(name='estimator_spec', output=estimator_spec) + ctx.set_non_tensor_output(name='eval_dict', output=eval_dict) + return update_op + + # TODO(priyag): Fix eval step 
hook to account for steps_per_run. + ctx = self._eval_distribution.extended.experimental_run_steps_on_iterator( + step_fn, iterator, iterations=steps_per_run_variable) + update_op = ctx.run_op + eval_dict = ctx.non_tensor_outputs['eval_dict'] + grouped_estimator_spec = ctx.non_tensor_outputs['estimator_spec'] + else: + features, labels = estimator_util.parse_iterator_result( + iterator.get_next()) + grouped_estimator_spec = ( + self._eval_distribution.extended.call_for_each_replica( + self._call_model_fn, + args=(features, labels, ModeKeys.EVAL, config))) + eval_metric_ops = _verify_and_create_loss_metric( + grouped_estimator_spec.eval_metric_ops, grouped_estimator_spec.loss, + self._eval_distribution) + update_op, eval_dict = _extract_metric_update_ops(eval_metric_ops, + self._eval_distribution) + + scaffold = _combine_distributed_scaffold(grouped_estimator_spec.scaffold, + self._eval_distribution) + + def get_hooks_from_the_first_device(per_device_hooks): + return [ + self._eval_distribution.experimental_local_results(per_device_hook)[0] + for per_device_hook in per_device_hooks + ] + + evaluation_hooks = get_hooks_from_the_first_device( + grouped_estimator_spec.evaluation_hooks) + + return (scaffold, evaluation_hooks, input_hooks, update_op, eval_dict) + + def _evaluate_run(self, checkpoint_path, scaffold, update_op, eval_dict, + all_hooks, output_dir): + """Run evaluation.""" + eval_results = evaluation._evaluate_once( # pylint: disable=protected-access + checkpoint_path=checkpoint_path, + master=self._config.evaluation_master, + scaffold=scaffold, + eval_ops=update_op, + final_ops=eval_dict, + hooks=all_hooks, + config=self._session_config) + + current_global_step = eval_results[tf.compat.v1.GraphKeys.GLOBAL_STEP] + + _write_dict_to_summary( + output_dir=output_dir, + dictionary=eval_results, + current_global_step=current_global_step) + + if checkpoint_path: + _write_checkpoint_path_to_summary( + output_dir=output_dir, + checkpoint_path=checkpoint_path, + current_global_step=current_global_step) + + return eval_results + + def _maybe_warm_start(self, checkpoint_path): + if not checkpoint_path and self._warm_start_settings: + tf.compat.v1.logging.info('Warm-starting with WarmStartSettings: %s' % + (self._warm_start_settings,)) + tf.compat.v1.train.warm_start(*self._warm_start_settings) + + @deprecation.deprecated( + None, 'This function has been renamed, use `export_saved_model` instead.') + def export_savedmodel(self, + export_dir_base, + serving_input_receiver_fn, + assets_extra=None, + as_text=False, + checkpoint_path=None, + strip_default_attrs=False): + # pylint: disable=line-too-long + """Exports inference graph as a `SavedModel` into the given dir. + + For a detailed guide, see + [SavedModel from + Estimators.](https://www.tensorflow.org/guide/estimator#savedmodels_from_estimators). + + This method builds a new graph by first calling the + `serving_input_receiver_fn` to obtain feature `Tensor`s, and then calling + this `Estimator`'s `model_fn` to generate the model graph based on those + features. It restores the given checkpoint (or, lacking that, the most + recent checkpoint) into this graph in a fresh session. Finally it creates + a timestamped export directory below the given `export_dir_base`, and writes + a `SavedModel` into it containing a single `tf.MetaGraphDef` saved from this + session. + + The exported `MetaGraphDef` will provide one `SignatureDef` for each + element of the `export_outputs` dict returned from the `model_fn`, named + using the same keys. 
One of these keys is always + `tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY`, + indicating which signature will be served when a serving request does not + specify one. For each signature, the outputs are provided by the + corresponding `tf.estimator.export.ExportOutput`s, and the inputs are always + the input receivers provided by the `serving_input_receiver_fn`. + + Extra assets may be written into the `SavedModel` via the `assets_extra` + argument. This should be a dict, where each key gives a destination path + (including the filename) relative to the assets.extra directory. The + corresponding value gives the full path of the source file to be copied. + For example, the simple case of copying a single file without renaming it + is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`. + + Args: + export_dir_base: A string containing a directory in which to create + timestamped subdirectories containing exported `SavedModel`s. + serving_input_receiver_fn: A function that takes no argument and returns a + `tf.estimator.export.ServingInputReceiver` or + `tf.estimator.export.TensorServingInputReceiver`. + assets_extra: A dict specifying how to populate the assets.extra directory + within the exported `SavedModel`, or `None` if no extra assets are + needed. + as_text: whether to write the `SavedModel` proto in text format. + checkpoint_path: The checkpoint path to export. If `None` (the default), + the most recent checkpoint found within the model directory is chosen. + strip_default_attrs: Boolean. If `True`, default-valued attributes will be + removed from the `NodeDef`s. For a detailed guide, see [Stripping + Default-Valued Attributes]( + https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes). + + Returns: + The path to the exported directory as a bytes object. + + Raises: + ValueError: if no `serving_input_receiver_fn` is provided, no + `export_outputs` are provided, or no checkpoint can be found. + """ + # pylint: enable=line-too-long + if not serving_input_receiver_fn: + raise ValueError('An input_receiver_fn must be defined.') + + return self._export_all_saved_models( + export_dir_base, {ModeKeys.PREDICT: serving_input_receiver_fn}, + assets_extra=assets_extra, + as_text=as_text, + checkpoint_path=checkpoint_path, + strip_default_attrs=strip_default_attrs) + + +@estimator_export('estimator.Estimator', v1=[]) # pylint: disable=missing-docstring +class EstimatorV2(Estimator): + __doc__ = Estimator.__doc__ + + export_savedmodel = deprecation.hide_attribute_from_api( + '`Estimator.export_savedmodel` has been deprecated. Please use ' + '`export_saved_model` instead.') + + def _assert_members_are_not_overridden(self): + """Asserts members of `Estimator` are not overridden.""" + _assert_members_are_not_overridden(EstimatorV2, self) + + +def _get_loss_reduce_op_for_reporting(): + graph = tf.compat.v1.get_default_graph() + if getattr(graph, '_is_loss_scaled_by_optimizer', False): # pylint: disable=protected-access + return tf.compat.v1.distribute.get_loss_reduction() + return tf.distribute.ReduceOp.SUM + + +def _assert_members_are_not_overridden(cls, obj): + """Assert Estimator methods are not overwritten.""" + # TPUEstimator is special cased (owned by TF). 
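+  # TPUEstimator deliberately overrides several Estimator internals, so the
+  # override check below is skipped for it; the match is done by class name,
+  # presumably so this module does not need to import TPUEstimator.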
+ if obj.__class__.__name__ == 'TPUEstimator': + return + + allowed_overrides = set([ + 'model_fn', '_create_and_assert_global_step', '_export_all_saved_models', + '_tf_api_names', '_tf_api_names_v1', '_estimator_api_names', + '_estimator_api_names_v1', '_estimator_api_constants', + '_estimator_api_constants_v1', 'latest_checkpoint' + ]) + + estimator_members = set([m for m in dir(cls) if not m.startswith('__')]) + subclass_members = set(obj.__class__.__dict__.keys()) + common_members = estimator_members & subclass_members - allowed_overrides + overridden_members = [ + m for m in common_members if getattr(cls, m) != getattr(obj.__class__, m) + ] + if overridden_members: + raise ValueError( + 'Subclasses of Estimator cannot override members of Estimator. ' + '{} does override {}'.format(obj.__class__, overridden_members)) + + +def _verify_and_create_loss_metric(eval_metric_ops, loss, distribution=None): + """Creates a metric for loss and throws an error if one already exists.""" + if model_fn_lib.LOSS_METRIC_KEY in eval_metric_ops: + raise ValueError( + 'Metric with name "%s" is not allowed, because Estimator ' % + (model_fn_lib.LOSS_METRIC_KEY) + + 'already defines a default metric with the same name.') + + if distribution is None: + loss_metric = tf.compat.v1.metrics.mean(loss) + else: + loss_metric = distribution.extended.call_for_each_replica( + tf.compat.v1.metrics.mean, args=(loss,)) + eval_metric_ops[model_fn_lib.LOSS_METRIC_KEY] = loss_metric + return eval_metric_ops + + +def maybe_overwrite_model_dir_and_session_config(config, model_dir): + """Overwrite estimator config by `model_dir` and `session_config` if needed. + + Args: + config: Original estimator config. + model_dir: Estimator model checkpoint directory. + + Returns: + Overwritten estimator config. + + Raises: + ValueError: Model directory inconsistent between `model_dir` and `config`. + """ + + if config is None: + config = run_config.RunConfig() + tf.compat.v1.logging.info('Using default config.') + if not isinstance(config, run_config.RunConfig): + raise ValueError( + 'config must be an instance of `RunConfig`, but provided %s.' % config) + + if config.session_config is None: + session_config = run_config.get_default_session_config() + config = run_config.RunConfig.replace(config, session_config=session_config) + + model_dir = compat_internal.path_to_str(model_dir) + if model_dir is not None: + if (getattr(config, 'model_dir', None) is not None and + config.model_dir != model_dir): + raise ValueError( + '`model_dir` are set both in constructor and `RunConfig`, but with ' + "different values. 
In constructor: '{}', in `RunConfig`: " + "'{}' ".format(model_dir, config.model_dir)) + if model_dir: + config = run_config.RunConfig.replace(config, model_dir=model_dir) + elif getattr(config, 'model_dir', None) is None: + model_dir = tempfile.mkdtemp() + tf.compat.v1.logging.warn('Using temporary folder as model directory: %s', + model_dir) + config = run_config.RunConfig.replace(config, model_dir=model_dir) + + return config + + +def create_per_replica_ready_for_local_init_op(scaffold): + """Create a `tf.train.Scaffold.ready_for_local_init_op` inside a replica.""" + if scaffold.ready_for_local_init_op: + return scaffold.ready_for_local_init_op + + def default_ready_for_local_init_op(): + return tf.compat.v1.report_uninitialized_variables( + tf.compat.v1.global_variables()) + + return tf.compat.v1.train.Scaffold.get_or_default( + 'ready_for_local_init_op', tf.compat.v1.GraphKeys.READY_FOR_LOCAL_INIT_OP, + default_ready_for_local_init_op) + + +def _combine_distributed_scaffold(grouped_scaffold, distribution): + """Combines scaffold(s) returned from `call_for_each_replica`.""" + + # TODO(anjalisridhar): Figure out how to resolve the following scaffold + # parameters: init_feed_dict, init_fn. + scaffold_list = distribution.experimental_local_results(grouped_scaffold) + init_feed_dict = [ + s.init_feed_dict for s in scaffold_list if s.init_feed_dict is not None + ] + if init_feed_dict: + init_feed_dict = distribution.group(init_feed_dict) + else: + init_feed_dict = None + + init_fn = [ + s._user_init_fn for s in scaffold_list if s._user_init_fn is not None # pylint: disable=protected-access + ] + if init_fn: + init_fn = init_fn[0] + else: + init_fn = None + + init_op = [s.init_op for s in scaffold_list if s.init_op is not None] + if init_op: + init_op = distribution.group(init_op) + else: + init_op = None + + def _unwrap_and_concat(value): + value = tf.nest.flatten(distribution.experimental_local_results(value)) + if len(value) != 1: + return tf.concat(value, 0) + return value[0] + + ready_op = distribution.extended.call_for_each_replica( + lambda scaffold: scaffold.ready_op, args=(grouped_scaffold,)) + if ready_op is not None: + ready_op = _unwrap_and_concat(ready_op) + + ready_for_local_init_op = distribution.extended.call_for_each_replica( + create_per_replica_ready_for_local_init_op, args=(grouped_scaffold,)) + if ready_for_local_init_op is not None: + ready_for_local_init_op = _unwrap_and_concat(ready_for_local_init_op) + else: + ready_for_local_init_op = None + + local_init_op = [ + s.local_init_op for s in scaffold_list if s.local_init_op is not None + ] + if local_init_op: + local_init_op = distribution.group(local_init_op) + else: + local_init_op = None + + summary_op = [s.summary_op for s in scaffold_list if s.summary_op is not None] + if summary_op: + summary_op = distribution.group(summary_op) + else: + summary_op = None + + savers = [s.saver for s in scaffold_list if s.saver is not None] + if savers: + saver = savers[0] + else: + saver = None + + scaffold = tf.compat.v1.train.Scaffold( + init_op=init_op, + ready_op=ready_op, + ready_for_local_init_op=ready_for_local_init_op, + local_init_op=local_init_op, + summary_op=summary_op, + saver=saver, + init_feed_dict=init_feed_dict, + init_fn=init_fn) + return scaffold + + +def _check_checkpoint_available(model_dir): + latest_path = tf.train.latest_checkpoint(model_dir) + if not latest_path: + raise ValueError( + 'Could not find trained model in model_dir: {}.'.format(model_dir)) + + +def _check_hooks_type(hooks): + """Returns hooks 
if all are `SessionRunHook`, raises TypeError otherwise.""" + hooks = list(hooks or []) + for h in hooks: + if not isinstance(h, tf.compat.v1.train.SessionRunHook): + raise TypeError('Hooks must be a SessionRunHook, given: {}'.format(h)) + return hooks + + +def _check_listeners_type(saving_listeners): + """Check listeners type.""" + listeners = list(saving_listeners or []) + for l in listeners: + if not isinstance(l, tf.compat.v1.train.CheckpointSaverListener): + raise TypeError( + 'saving_listeners must be a list of CheckpointSaverListener, ' + 'given: {}'.format(l)) + return listeners + + +def _get_replica_device_setter(config): + """Creates a replica device setter if required as a default `device_fn`. + + `Estimator` uses `tf.train.ReplicaDeviceSetter` as a default device placer. It + sets the distributed related arguments such as number of `ps_replicas` based + on given `config`. + + Args: + config: A `tf.estimator.RunConfig` instance. + + Returns: + A replica device setter, or `None`. + """ + if config.task_type: + worker_device = '/job:%s/task:%d' % (config.task_type, config.task_id) + else: + worker_device = '/job:worker' + + if config.num_ps_replicas > 0: + return tf.compat.v1.train.replica_device_setter( + ps_tasks=config.num_ps_replicas, + worker_device=worker_device, + merge_devices=True, + ps_ops=list(device_setter.STANDARD_PS_OPS), + cluster=config.cluster_spec) + else: + return None + + +def _verify_model_fn_args(model_fn, params): + """Verifies `model_fn` arguments.""" + args = set(function_utils.fn_args(model_fn)) + if 'features' not in args: + raise ValueError('model_fn (%s) must include features argument.' % model_fn) + if params is not None and 'params' not in args: + raise ValueError('model_fn (%s) does not include params argument, ' + 'but params (%s) is passed to Estimator.' % + (model_fn, params)) + if params is None and 'params' in args: + tf.compat.v1.logging.warn( + 'Estimator\'s model_fn (%s) includes params ' + 'argument, but params are not passed to Estimator.', model_fn) + non_valid_args = list(args - _VALID_MODEL_FN_ARGS) + if non_valid_args: + raise ValueError('model_fn (%s) has following not expected args: %s' % + (model_fn, non_valid_args)) + + +def _load_global_step_from_checkpoint_dir(checkpoint_dir): + try: + checkpoint_reader = tf.compat.v1.train.NewCheckpointReader( + tf.train.latest_checkpoint(checkpoint_dir)) + return checkpoint_reader.get_tensor(tf.compat.v1.GraphKeys.GLOBAL_STEP) + except: # pylint: disable=bare-except + return 0 + + +def _extract_metric_update_ops(eval_dict, distribution=None): + """Separate update operations from metric value operations.""" + update_ops = [] + value_ops = {} + # Sort metrics lexicographically so graph is identical every time. + for name, value in sorted(six.iteritems(eval_dict)): + value_ops[name] = value[0] + update_ops.append( + distribution.group(value[1]) if distribution else value[1]) + + update_op = tf.group(*update_ops) if update_ops else None + return update_op, value_ops + + +def _dict_to_str(dictionary): + """Get a `str` representation of a `dict`. + + Args: + dictionary: The `dict` to be represented as `str`. + + Returns: + A `str` representing the `dictionary`. + """ + return ', '.join('%s = %s' % (k, v) + for k, v in sorted(six.iteritems(dictionary)) + if not isinstance(v, six.binary_type)) + + +def _write_dict_to_summary(output_dir, dictionary, current_global_step): + """Writes a `dict` into summary file in given output directory. 
+ + Args: + output_dir: `str`, directory to write the summary file in. + dictionary: the `dict` to be written to summary file. + current_global_step: `int`, the current global step. + """ + tf.compat.v1.logging.info('Saving dict for global step %d: %s', + current_global_step, _dict_to_str(dictionary)) + summary_writer = tf.compat.v1.summary.FileWriterCache.get(output_dir) + summary_proto = summary_pb2.Summary() + for key in dictionary: + if dictionary[key] is None: + continue + if key == 'global_step': + continue + if (isinstance(dictionary[key], np.float32) or + isinstance(dictionary[key], float)): + summary_proto.value.add(tag=key, simple_value=float(dictionary[key])) + elif (isinstance(dictionary[key], np.int64) or + isinstance(dictionary[key], np.int32) or + isinstance(dictionary[key], int)): + summary_proto.value.add(tag=key, simple_value=int(dictionary[key])) + elif isinstance(dictionary[key], six.binary_type): + try: + summ = summary_pb2.Summary.FromString(dictionary[key]) + for i, _ in enumerate(summ.value): + summ.value[i].tag = '%s/%d' % (key, i) + summary_proto.value.extend(summ.value) + except message.DecodeError: + tf.compat.v1.logging.warn( + 'Skipping summary for %s, cannot parse string to Summary.', key) + continue + elif isinstance(dictionary[key], np.ndarray): + value = summary_proto.value.add() + value.tag = key + value.node_name = key + tensor_proto = tf.make_tensor_proto(dictionary[key]) + value.tensor.CopyFrom(tensor_proto) + # pylint: disable=line-too-long + tf.compat.v1.logging.info( + 'Summary for np.ndarray is not visible in Tensorboard by default. ' + 'Consider using a Tensorboard plugin for visualization (see ' + 'https://github.com/tensorflow/tensorboard-plugin-example/blob/master/README.md' + ' for more information).') + # pylint: enable=line-too-long + else: + tf.compat.v1.logging.warn( + 'Skipping summary for %s, must be a float, np.float32, np.int64, ' + 'np.int32 or int or np.ndarray or a serialized string of Summary.', + key) + summary_writer.add_summary(summary_proto, current_global_step) + summary_writer.flush() + + +def _write_checkpoint_path_to_summary(output_dir, checkpoint_path, + current_global_step): + """Writes `checkpoint_path` into summary file in the given output directory. + + Args: + output_dir: `str`, directory to write the summary file in. + checkpoint_path: `str`, checkpoint file path to be written to summary file. + current_global_step: `int`, the current global step. + """ + + checkpoint_path_tag = 'checkpoint_path' + + tf.compat.v1.logging.info('Saving \'%s\' summary for global step %d: %s', + checkpoint_path_tag, current_global_step, + checkpoint_path) + summary_proto = summary_pb2.Summary() + summary_proto.value.add( + tag=checkpoint_path_tag, + tensor=tf.make_tensor_proto(checkpoint_path, dtype=tf.dtypes.string)) + summary_writer = tf.compat.v1.summary.FileWriterCache.get(output_dir) + summary_writer.add_summary(summary_proto, current_global_step) + summary_writer.flush() + + +def _has_dataset_or_queue_runner(maybe_tensor): + """Returns `True` if `Dataset` or `QueueRunner` has been used.""" + # Check TF dataset first. Here, we use a simple algorithm to check the top + # level Tensors only, which should be sufficient for most users. + tensors = [ + x for x in tf.nest.flatten(maybe_tensor) if isinstance(x, tf.Tensor) + ] + if any([t.op.type == 'IteratorGetNext' for t in tensors]): + return True + + # Now, check queue. 
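+  # The QUEUE_RUNNERS collection itself is returned; an empty collection is
+  # falsy, so callers can treat the result as a boolean.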
+ return tf.compat.v1.get_default_graph().get_collection( + tf.compat.v1.GraphKeys.QUEUE_RUNNERS) + + +VocabInfo = tf.compat.v1.train.VocabInfo # pylint: disable=invalid-name +estimator_export('estimator.VocabInfo')(VocabInfo) + + +@estimator_export('estimator.WarmStartSettings') +class WarmStartSettings( + collections.namedtuple('WarmStartSettings', [ + 'ckpt_to_initialize_from', + 'vars_to_warm_start', + 'var_name_to_vocab_info', + 'var_name_to_prev_var_name', + ])): + """Settings for warm-starting in `tf.estimator.Estimators`. + + Example Use with canned `tf.estimator.DNNEstimator`: + + ``` + emb_vocab_file = tf.feature_column.embedding_column( + tf.feature_column.categorical_column_with_vocabulary_file( + "sc_vocab_file", "new_vocab.txt", vocab_size=100), + dimension=8) + emb_vocab_list = tf.feature_column.embedding_column( + tf.feature_column.categorical_column_with_vocabulary_list( + "sc_vocab_list", vocabulary_list=["a", "b"]), + dimension=8) + estimator = tf.estimator.DNNClassifier( + hidden_units=[128, 64], feature_columns=[emb_vocab_file, emb_vocab_list], + warm_start_from=ws) + ``` + + where `ws` could be defined as: + + Warm-start all weights in the model (input layer and hidden weights). + Either the directory or a specific checkpoint can be provided (in the case + of the former, the latest checkpoint will be used): + + ``` + ws = WarmStartSettings(ckpt_to_initialize_from="/tmp") + ws = WarmStartSettings(ckpt_to_initialize_from="/tmp/model-1000") + ``` + + Warm-start only the embeddings (input layer): + + ``` + ws = WarmStartSettings(ckpt_to_initialize_from="/tmp", + vars_to_warm_start=".*input_layer.*") + ``` + + Warm-start all weights but the embedding parameters corresponding to + `sc_vocab_file` have a different vocab from the one used in the current + model: + + ``` + vocab_info = tf.estimator.VocabInfo( + new_vocab=sc_vocab_file.vocabulary_file, + new_vocab_size=sc_vocab_file.vocabulary_size, + num_oov_buckets=sc_vocab_file.num_oov_buckets, + old_vocab="old_vocab.txt" + ) + ws = WarmStartSettings( + ckpt_to_initialize_from="/tmp", + var_name_to_vocab_info={ + "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info + }) + ``` + + Warm-start only `sc_vocab_file` embeddings (and no other variables), which + have a different vocab from the one used in the current model: + + ``` + vocab_info = tf.estimator.VocabInfo( + new_vocab=sc_vocab_file.vocabulary_file, + new_vocab_size=sc_vocab_file.vocabulary_size, + num_oov_buckets=sc_vocab_file.num_oov_buckets, + old_vocab="old_vocab.txt" + ) + ws = WarmStartSettings( + ckpt_to_initialize_from="/tmp", + vars_to_warm_start=None, + var_name_to_vocab_info={ + "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info + }) + ``` + + Warm-start all weights but the parameters corresponding to `sc_vocab_file` + have a different vocab from the one used in current checkpoint, and only + 100 of those entries were used: + + ``` + vocab_info = tf.estimator.VocabInfo( + new_vocab=sc_vocab_file.vocabulary_file, + new_vocab_size=sc_vocab_file.vocabulary_size, + num_oov_buckets=sc_vocab_file.num_oov_buckets, + old_vocab="old_vocab.txt", + old_vocab_size=100 + ) + ws = WarmStartSettings( + ckpt_to_initialize_from="/tmp", + var_name_to_vocab_info={ + "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info + }) + ``` + + Warm-start all weights but the parameters corresponding to `sc_vocab_file` + have a different vocab from the one used in current checkpoint and the + parameters corresponding to `sc_vocab_list` have a 
different name from the + current checkpoint: + + ``` + vocab_info = tf.estimator.VocabInfo( + new_vocab=sc_vocab_file.vocabulary_file, + new_vocab_size=sc_vocab_file.vocabulary_size, + num_oov_buckets=sc_vocab_file.num_oov_buckets, + old_vocab="old_vocab.txt", + old_vocab_size=100 + ) + ws = WarmStartSettings( + ckpt_to_initialize_from="/tmp", + var_name_to_vocab_info={ + "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info + }, + var_name_to_prev_var_name={ + "input_layer/sc_vocab_list_embedding/embedding_weights": + "old_tensor_name" + }) + ``` + + Warm-start all TRAINABLE variables: + + ``` + ws = WarmStartSettings(ckpt_to_initialize_from="/tmp", + vars_to_warm_start=".*") + ``` + + Warm-start all variables (including non-TRAINABLE): + + ``` + ws = WarmStartSettings(ckpt_to_initialize_from="/tmp", + vars_to_warm_start=[".*"]) + ``` + + Warm-start non-TRAINABLE variables "v1", "v1/Momentum", and "v2" but not + "v2/momentum": + + ``` + ws = WarmStartSettings(ckpt_to_initialize_from="/tmp", + vars_to_warm_start=["v1", "v2[^/]"]) + ``` + + Attributes: + ckpt_to_initialize_from: [Required] A string specifying the directory with + checkpoint file(s) or path to checkpoint from which to warm-start the + model parameters. + vars_to_warm_start: [Optional] One of the following: + + * A regular expression (string) that captures which variables to + warm-start (see tf.compat.v1.get_collection). This expression will only + consider variables in the TRAINABLE_VARIABLES collection -- if you need + to warm-start non_TRAINABLE vars (such as optimizer accumulators or + batch norm statistics), please use the below option. + * A list of strings, each a regex scope provided to + tf.compat.v1.get_collection with GLOBAL_VARIABLES (please see + tf.compat.v1.get_collection). For backwards compatibility reasons, this + is separate from the single-string argument type. + * A list of Variables to warm-start. If you do not have access to the + `Variable` objects at the call site, please use the above option. + * `None`, in which case only TRAINABLE variables specified in + `var_name_to_vocab_info` will be warm-started. + + Defaults to `'.*'`, which warm-starts all variables in the + TRAINABLE_VARIABLES collection. Note that this excludes variables such as + accumulators and moving statistics from batch norm. + var_name_to_vocab_info: [Optional] Dict of variable names (strings) to + `tf.estimator.VocabInfo`. The variable names should be "full" variables, + not the names of the partitions. If not explicitly provided, the variable + is assumed to have no (changes to) vocabulary. + var_name_to_prev_var_name: [Optional] Dict of variable names (strings) to + name of the previously-trained variable in `ckpt_to_initialize_from`. If + not explicitly provided, the name of the variable is assumed to be same + between previous checkpoint and current model. Note that this has no + effect on the set of variables that is warm-started, and only controls + name mapping (use `vars_to_warm_start` for controlling what variables to + warm-start). 
+ """ + + def __new__(cls, + ckpt_to_initialize_from, + vars_to_warm_start='.*', + var_name_to_vocab_info=None, + var_name_to_prev_var_name=None): + if not ckpt_to_initialize_from: + raise ValueError( + '`ckpt_to_initialize_from` MUST be set in WarmStartSettings') + return super(WarmStartSettings, cls).__new__( + cls, + ckpt_to_initialize_from, + vars_to_warm_start, + var_name_to_vocab_info or {}, + var_name_to_prev_var_name or {}, + ) + + +def _get_default_warm_start_settings(warm_start_from): + """Returns default `tf.estimator.WarmStartSettings`. + + Args: + warm_start_from: Either a string representing the filepath of a checkpoint + or `SavedModel` to initialize from, or an instance of + `tf.estimator.WarmStartSettings`. + + Returns: + Either None or an instance of `WarmStartSettings`. + + Raises: + ValueError: If `warm_start_from` is not `None` but is neither a string nor + an instance of `WarmStartSettings`. + """ + if warm_start_from is None: + return None + if isinstance(warm_start_from, (six.string_types, six.binary_type)): + # Infer that this is a SavedModel if export_path + + # 'variables/variables.index' exists, and if so, construct the + # WarmStartSettings pointing to the variables path + # (export_path + 'variables/variables'). + if tf.compat.v1.gfile.Exists( + os.path.join( + saved_model_utils.get_variables_dir(warm_start_from), + tf.compat.as_text('variables.index'))): + tf.compat.v1.logging.info('Warm-starting from a SavedModel') + return WarmStartSettings( + ckpt_to_initialize_from=saved_model_utils.get_variables_path( + warm_start_from)) + return WarmStartSettings(ckpt_to_initialize_from=warm_start_from) + elif isinstance(warm_start_from, WarmStartSettings): + return warm_start_from + else: + raise ValueError('warm_start_from must be a string or a WarmStartSettings, ' + 'instead got {}'.format(type(warm_start_from))) diff --git a/exp-4-bert-squad/tf_updated_files/tpu_estimator.py b/exp-4-bert-squad/tf_updated_files/tpu_estimator.py new file mode 100644 index 0000000..29e8c45 --- /dev/null +++ b/exp-4-bert-squad/tf_updated_files/tpu_estimator.py @@ -0,0 +1,4544 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# =================================================================== +"""TPUEstimator class.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import copy +import enum +import math +import os +import signal +import sys +import threading +import time + +import tensorflow as tf +import numpy as np +import six +from six.moves import queue as Queue # pylint: disable=redefined-builtin +from six.moves import xrange # pylint: disable=redefined-builtin + +from tensorflow.core.framework import variable_pb2 +from tensorflow.core.framework.summary_pb2 import Summary +from tensorflow.core.protobuf.tpu import compilation_result_pb2 as tpu_compilation_result +from tensorflow.python.data.util import nest as data_nest +from tensorflow.python.distribute.cluster_resolver import tpu_cluster_resolver +from tensorflow.python.framework import function +from tensorflow.python.framework import ops +from tensorflow.python.ops import control_flow_ops +from tensorflow.python.ops import control_flow_util +from tensorflow.python.ops import resource_variable_ops +from tensorflow.python.ops import summary_ops_v2 +from tensorflow.python.platform import tf_logging as logging +from tensorflow.python.tpu import functional as tpu_functional +from tensorflow.python.tpu import preempted_hook +from tensorflow.python.tpu import session_support +from tensorflow.python.tpu import tensor_tracer +from tensorflow.python.tpu import tpu +from tensorflow.python.tpu import tpu_embedding_gradient +from tensorflow.python.tpu import tpu_feed +from tensorflow.python.tpu import tpu_function +from tensorflow.python.tpu import training_loop +from tensorflow.python.tpu.ops import tpu_ops +from tensorflow.python.training import evaluation +from tensorflow.python.util import function_utils +from tensorflow.python.util import tf_inspect +from tensorflow.python.util.tf_export import estimator_export +from tensorflow_estimator.python.estimator import estimator as estimator_lib +from tensorflow_estimator.python.estimator import model_fn as model_fn_lib +from tensorflow_estimator.python.estimator.export import export_output as export_output_lib +from tensorflow_estimator.python.estimator.tpu import _tpu_estimator_embedding +from tensorflow_estimator.python.estimator.tpu import error_handling +from tensorflow_estimator.python.estimator.tpu import iteration_count_estimator +from tensorflow_estimator.python.estimator.tpu import tpu_config +from tensorflow_estimator.python.estimator.tpu import tpu_context +from tensorflow_estimator.python.estimator.tpu import util as util_lib +from tensorflow_estimator.python.estimator.tpu._tpu_estimator_embedding import AdagradParameters # pylint: disable=unused-import +from tensorflow_estimator.python.estimator.tpu._tpu_estimator_embedding import AdamParameters # pylint: disable=unused-import +from tensorflow_estimator.python.estimator.tpu._tpu_estimator_embedding import EmbeddingConfigSpec # pylint: disable=unused-import +from tensorflow_estimator.python.estimator.tpu._tpu_estimator_embedding import StochasticGradientDescentParameters # pylint: disable=unused-import + +_INITIAL_LOSS = 1e7 +_ZERO_LOSS = 0. 
+_TPU_ESTIMATOR = 'tpu_estimator' +_ITERATIONS_PER_LOOP_VAR = 'iterations_per_loop' +_BATCH_SIZE_KEY = 'batch_size' +_CTX_KEY = 'context' +_USE_TPU_KEY = 'use_tpu' +_CROSS_REPLICA_SUM_OP = 'CrossReplicaSum' +_ONE_GIGABYTE = 1024 * 1024 * 1024 +_TPU_ENQUEUE_OPS = '_tpu_enqueue_ops' +_TPU_TRAIN_OP = '_tpu_train_op' +_INFERENCE_ON_TPU_MODE = '_inference_on_tpu' +_KEY_WHEN_PREDICTIONS_IS_A_TENSOR = '_key_when_predictions_is_a_tensor' +_TENSOR_PACKER_SMALL_FEATURE_DIM_SIZE = 1 +_TENSOR_PACKER_MINIMUM_NUM_SMALL_FEATURES_TO_GROUP = 5 +_TENSOR_PACKER_CONCATENATED_SMALL_FEATURES_KEY = '_concatenated_small_features' + +# Ideally _USE_TPU_KEY should be reserved as well. However there are already +# models that make use of this key, thus it can not be reserved now to prevent +# breakage. In the long run, we would like to mitigate this by migrating models +# off of using _USE_TPU_KEY. +_RESERVED_PARAMS_KEYS = [_BATCH_SIZE_KEY, _CTX_KEY] + +# TODO(b/65703635): Flip the value and remove all dead code. Currently, this is +# only used for per-core based deployments. For per-host based pipelines, if a +# user returns a Dataset instance it will be automatically wrapped in a +# tf.while_loop (This can be disabled by returning features and labels +# explicitly). +_WRAP_INPUT_FN_INTO_WHILE_LOOP = False + +# Track the adoption of TPUEstimator +_tpu_estimator_gauge = tf.compat.v2.__internal__.monitoring.BoolGauge( + '/tensorflow/api/tpu_estimator', + 'Whether the program uses tpu estimator or not.') + +if ops.get_to_proto_function('{}_{}'.format(_TPU_ESTIMATOR, + _ITERATIONS_PER_LOOP_VAR)) is None: + ops.register_proto_function( + '{}_{}'.format(_TPU_ESTIMATOR, _ITERATIONS_PER_LOOP_VAR), + proto_type=variable_pb2.VariableDef, + to_proto=resource_variable_ops._to_proto_fn, # pylint: disable=protected-access + from_proto=resource_variable_ops._from_proto_fn) # pylint: disable=protected-access + + +def _is_iterable(obj): + """A Python 2 and 3 compatible util to check whether `obj` is iterable.""" + try: + iter(obj) + return True + except TypeError: + return False + + +class CatchInvalidHostcallFunctions(control_flow_ops.XLAControlFlowContext): + + def AddOp(self, op): + if op.type in [ + 'AudioSummary', 'AudioSummaryV2', 'HistogramSummary', 'ImageSummary', + 'MergeSummary', 'ScalarSummary', 'TensorSummary', 'TensorSummaryV2' + ]: + raise ValueError('Please use tf.contrib.summary instead of tf.summary ' + 'inside of host_calls.') + + +def _create_global_step(graph): + graph = graph or tf.compat.v1.get_default_graph() + if tf.compat.v1.train.get_global_step(graph) is not None: + raise ValueError('"global_step" already exists.') + # Create in proper graph and base name_scope. + with graph.as_default() as g, g.name_scope(None): + return tf.compat.v1.get_variable( + tf.compat.v1.GraphKeys.GLOBAL_STEP, + shape=[], + dtype=tf.dtypes.int64, + initializer=tf.compat.v1.initializers.zeros(), + trainable=False, + use_resource=True, + collections=[ + tf.compat.v1.GraphKeys.GLOBAL_VARIABLES, + tf.compat.v1.GraphKeys.GLOBAL_STEP + ]) + + +def _create_or_get_iterations_per_loop(): + """Creates or gets the iterations_per_loop variable. + + In TPUEstimator, the user provided computation, the model_fn, is wrapped + inside a tf.while_loop for peak performance. The iterations of the loop are + specified by this variable, which adjusts its value on the CPU after each TPU + program execution and before the next TPU execution. 
+ + The purpose of using a variable, rather then a constant, is to allow + TPUEstimator adapt the TPU training iterations according to the final steps + specified by users. For example, if the user sets the iterations_per_loop as 4 + in TPUConfig and steps as 10 in TPUEstimator.train(), the iterations_per_loop + variable will have the following value before each TPU training. + + - 1-th TPU execution: iterations_per_loop = 4 + - 2-th TPU execution: iterations_per_loop = 4 + - 3-th TPU execution: iterations_per_loop = 2 + + As model_fn increases the global step once per train_op invocation, the global + step is 10 after all TPU executions, matching the steps=10 inputs passed in by + users. + + Returns: + A TF non-trainable resource variable. + + Raises: + RuntimeError: If multi iterations_per_loop variables were found. + """ + graph = tf.compat.v1.get_default_graph() + collection_name = '{}_{}'.format(_TPU_ESTIMATOR, _ITERATIONS_PER_LOOP_VAR) + iter_vars = graph.get_collection(collection_name) + if len(iter_vars) == 1: + return iter_vars[0] + elif len(iter_vars) > 1: + raise RuntimeError('Multiple iterations_per_loop_var in collection.') + + with ops.colocate_with(tf.compat.v1.train.get_global_step()): + with tf.compat.v1.variable_scope( + _TPU_ESTIMATOR, reuse=tf.compat.v1.AUTO_REUSE): + return tf.compat.v1.get_variable( + _ITERATIONS_PER_LOOP_VAR, + initializer=tf.compat.v1.initializers.zeros(), + shape=[], + dtype=tf.dtypes.int32, + trainable=False, + collections=[collection_name, tf.compat.v1.GraphKeys.LOCAL_VARIABLES], + use_resource=True) + + +def _sync_variables_ops(ctx): + """Create varriables synchronization ops. + + Gets the variables back from TPU nodes. This means the variables updated + by TPU will now be *synced* to host memory. + In BROADCAST mode, we skip this sync since the variables are ususally too + big to transmit via RPC. + + Args: + ctx: A `_InternalTPUContext` instance with mode. + + Returns: + A list of sync ops. + """ + + if not ctx.is_input_broadcast_with_iterators(): + return [ + tf.debugging.check_numerics(v.read_value(), + 'Gradient for %s is NaN' % v.name).op + for v in tf.compat.v1.trainable_variables() + ] + else: + return [tf.no_op()] + + +def _increase_eval_step_op(iterations_per_loop): + """Returns an op to increase the eval step for TPU evaluation. + + Args: + iterations_per_loop: Tensor. The number of eval steps running in TPU system + before returning to CPU host for each `Session.run`. + + Returns: + An operation + """ + eval_step = evaluation._get_or_create_eval_step() # pylint: disable=protected-access + # Estimator evaluate increases 1 by default. So, we increase the difference. + return tf.compat.v1.assign_add( + eval_step, + tf.cast(iterations_per_loop - 1, dtype=eval_step.dtype), + use_locking=True) + + +def _extract_key_names(tensor_or_dict): + if isinstance(tensor_or_dict, dict): + return sorted(tensor_or_dict.keys()) + return [] + + +class PeriodicLogger(object): + + def __init__(self, seconds): + self._log_every_n_seconds = seconds + self._last_log_time = 0 + + def log(self, msg, *args, **kw): + if time.time() - self._last_log_time > self._log_every_n_seconds: + self._last_log_time = time.time() + tf.compat.v1.logging.info(msg, *args, **kw) + + +class _SIGNAL(object): + """Signal used to control the thread of infeed/outfeed. + + All preserved signals must be negative numbers. Positive numbers are used to + indicate the number of iterations for next training/evaluation loop. 
+ """ + NEXT_BATCH = -1 + STOP = -2 + + +@estimator_export(v1=['estimator.tpu.TPUEstimatorSpec']) +class TPUEstimatorSpec(model_fn_lib._TPUEstimatorSpec): # pylint: disable=protected-access + """Ops and objects returned from a `model_fn` and passed to `TPUEstimator`. + + See `EstimatorSpec` for `mode`, `predictions`, `loss`, `train_op`, and + `export_outputs`. + + For evaluation, `eval_metrics `is a tuple of `metric_fn` and `tensors`, where + `metric_fn` runs on CPU to generate metrics and `tensors` represents the + `Tensor`s transferred from TPU system to CPU host and passed to `metric_fn`. + To be precise, TPU evaluation expects a slightly different signature from the + `tf.estimator.Estimator`. While `EstimatorSpec.eval_metric_ops` expects a + dict, `TPUEstimatorSpec.eval_metrics` is a tuple of `metric_fn` and `tensors`. + The `tensors` could be a list of `Tensor`s or dict of names to `Tensor`s. The + `tensors` usually specify the model logits, which are transferred back from + TPU system to CPU host. All tensors must have be batch-major, i.e., the batch + size is the first dimension. Once all tensors are available at CPU host from + all shards, they are concatenated (on CPU) and passed as positional arguments + to the `metric_fn` if `tensors` is list or keyword arguments if `tensors` is + a dict. `metric_fn` takes the `tensors` and returns a dict from metric string + name to the result of calling a metric function, namely a `(metric_tensor, + update_op)` tuple. See `TPUEstimator` for MNIST example how to specify the + `eval_metrics`. + + `scaffold_fn` is a function running on CPU to generate the `Scaffold`. This + function should not capture any Tensors in `model_fn`. + + `host_call` is a tuple of a `function` and a list or dictionary of `tensors` + to pass to that function and returns a list of Tensors. `host_call` currently + works for train() and evaluate(). The Tensors returned by the function is + executed on the CPU on every step, so there is communication overhead when + sending tensors from TPU to CPU. To reduce the overhead, try reducing the + size of the tensors. The `tensors` are concatenated along their major (batch) + dimension, and so must be >= rank 1. The `host_call` is useful for writing + summaries with `tf.contrib.summary.create_file_writer`. + + @compatibility(TF2) + TPU Estimator manages its own TensorFlow graph and session, so it is not + compatible with TF2 behaviors. We recommend that you migrate to the newer + `tf.distribute.TPUStrategy`. See the + [TPU guide](https://www.tensorflow.org/guide/tpu) for details. 
+ @end_compatibility + """ + + def __new__(cls, + mode, + predictions=None, + loss=None, + train_op=None, + eval_metrics=None, + export_outputs=None, + scaffold_fn=None, + host_call=None, + training_hooks=None, + evaluation_hooks=None, + prediction_hooks=None): + """Creates a validated `TPUEstimatorSpec` instance.""" + cls._host_calls = {} + if eval_metrics is not None: + cls._host_calls['eval_metrics'] = eval_metrics + if host_call is not None: + cls._host_calls['host_call'] = host_call + _OutfeedHostCall.validate(cls._host_calls) + + training_hooks = tuple(training_hooks or []) + evaluation_hooks = tuple(evaluation_hooks or []) + prediction_hooks = tuple(prediction_hooks or []) + + for hook in training_hooks + evaluation_hooks + prediction_hooks: + if not isinstance(hook, tf.compat.v1.train.SessionRunHook): + raise TypeError( + 'All hooks must be SessionRunHook instances, given: {}'.format( + hook)) + + return super(TPUEstimatorSpec, cls).__new__( + cls, + mode=mode, + predictions=predictions, + loss=loss, + train_op=train_op, + eval_metrics=eval_metrics, + export_outputs=export_outputs, + scaffold_fn=scaffold_fn, + host_call=host_call, + training_hooks=training_hooks, + evaluation_hooks=evaluation_hooks, + prediction_hooks=prediction_hooks) + + def as_estimator_spec(self): + """Creates an equivalent `EstimatorSpec` used by CPU train/eval.""" + host_call_ret = _OutfeedHostCall.create_cpu_hostcall(self._host_calls) + eval_metric_ops = None + if self.eval_metrics is not None: + eval_metric_ops = host_call_ret['eval_metrics'] + hooks = None + if self.host_call is not None: + hooks = [_OutfeedHostCallHook(host_call_ret['host_call'])] + loss = self.loss + if tensor_tracer.TensorTracer.is_enabled() \ + and self.train_op is not None: + tt = tensor_tracer.TensorTracer() + loss = tt.trace_cpu(tf.compat.v1.get_default_graph(), loss, self.train_op) + + hooks = tuple(hooks or []) + scaffold = self.scaffold_fn() if self.scaffold_fn else None + return model_fn_lib.EstimatorSpec( + mode=self.mode, + predictions=self.predictions, + loss=loss, + train_op=self.train_op, + eval_metric_ops=eval_metric_ops, + export_outputs=self.export_outputs, + scaffold=scaffold, + training_hooks=self.training_hooks + hooks, + evaluation_hooks=self.evaluation_hooks + hooks, + prediction_hooks=self.prediction_hooks + hooks) + + +class _OpQueueContext(object): + """Manages work queue and thread for a infeed/outfeed thread.""" + + def __init__(self, name, target, args): + self._name = name + self._queue = Queue.Queue() + args = (self,) + args + self._thread = threading.Thread(name=name, target=target, args=args) + self._thread.daemon = True + self._thread.start() + + def stop(self): + self._queue.put(_SIGNAL.STOP) + + def send_next_batch_signal(self, iterations): + self._queue.put(iterations) + + def read_iteration_counts(self): + while True: + iterations = self._queue.get(block=True) + tf.compat.v1.logging.debug('%s read iterations %s', self._name, + iterations) + if iterations == _SIGNAL.STOP: + tf.compat.v1.logging.info('%s received shutdown signal, stopping.', + self._name) + return + yield iterations + + def join(self): + tf.compat.v1.logging.info('Shutting down %s thread.', self._name) + self.stop() + self._thread.join() + + +class _OpSignalOnceQueueContext(_OpQueueContext): + """Manages work queue and thread for a infeed/outfeed thread. + + This subclass only signals once. 
+ """ + + def __init__(self, name, target, args): + super(_OpSignalOnceQueueContext, self).__init__(name, target, args) + self._has_signaled = False + + def send_next_batch_signal(self, iterations): + if not self._has_signaled: + self._queue.put(iterations) + self._has_signaled = True + + +class TPUInfeedOutfeedSessionHook(tf.compat.v1.train.SessionRunHook): + """A Session hook setting up the TPU initialization, infeed, and outfeed. + + This hook does two major things: + 1. initialize and shutdown TPU system. + 2. launch and join the threads for infeed enqueue and (optional) outfeed + dequeue. + """ + + def __init__(self, + ctx, + enqueue_ops, + dequeue_ops, + tpu_compile_op, + run_infeed_loop_on_coordinator=True, + rendezvous=None, + master=None, + session_config=None, + tpu_init_ops=None, + outfeed_every_n_steps=1): + self._master_job = ctx.master_job + self._enqueue_ops = enqueue_ops + self._dequeue_ops = dequeue_ops + self._rendezvous = rendezvous + self._master = master + self._session_config = session_config + self._init_ops = list(tpu_init_ops or []) + if ctx.embedding_config is None: + self._embedding_layer_config = None + else: + self._embedding_layer_config = ( + ctx.embedding_config.tpu_embedding.config_proto) + self._run_infeed_loop_on_coordinator = run_infeed_loop_on_coordinator + self._initial_infeed_sleep_secs = ( + ctx.config.tpu_config.initial_infeed_sleep_secs) + self._tpu_compile_op = tpu_compile_op + + # When using model parallelism, the TPU is pre-initialized at startup to + # fetch mesh information. We skip re-initializing it here for + # MeshTensorFlow since it places variables on TPU directly. Reinitialize tpu + # is causing the variable corruption since the previous allocated memory + # might be overwritten for other purpose. + if (ctx.model_parallelism_enabled and + (ctx.config.tpu_config.per_host_input_for_training is + tpu_config.InputPipelineConfig.BROADCAST)): + self._should_initialize_tpu = False + else: + self._should_initialize_tpu = True + self._outfeed_every_n_steps = outfeed_every_n_steps + + def begin(self): + tf.compat.v1.logging.info('TPU job name %s', self._master_job) + self._iterations_per_loop_var = _create_or_get_iterations_per_loop() + if self._should_initialize_tpu: + self._finalize_ops = [ + tf.compat.v1.tpu.shutdown_system(job=self._master_job) + ] + else: + self._finalize_ops = [] + + summary_writer_init_ops = summary_ops_v2.summary_writer_initializer_op() + self._init_ops.extend(summary_writer_init_ops) + # Get all the writer resources from the initializer, so we know what to + # flush. 
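+    # Each summary-writer initializer op takes the writer resource as its
+    # first input; append a flush of that resource to the finalize ops run
+    # in `end()`.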
+ for op in summary_writer_init_ops: + self._finalize_ops.append( + summary_ops_v2.legacy_raw_flush(writer=op.inputs[0])) + + def _run_infeed(self, queue_ctx, session): + tf.compat.v1.logging.info('Starting infeed thread controller.') + if self._initial_infeed_sleep_secs: + tf.compat.v1.logging.info('Infeed thread sleeping for %d seconds.', + self._initial_infeed_sleep_secs) + time.sleep(self._initial_infeed_sleep_secs) + tf.compat.v1.logging.info('Infeed thread starting after sleep') + + with self._rendezvous.catch_errors(source='infeed', session=session): + if self._run_infeed_loop_on_coordinator: + for count, steps in enumerate(queue_ctx.read_iteration_counts()): + for i in xrange(steps): + tf.compat.v1.logging.debug('Infeed enqueue for iteration (%d, %d)', + count, i) + session.run(self._enqueue_ops) + else: + for _ in queue_ctx.read_iteration_counts(): + session.run(self._enqueue_ops) + tf.compat.v1.logging.info('Infeed thread finished, shutting down.') + + def _run_outfeed(self, queue_ctx, session): + tf.compat.v1.logging.info('Starting outfeed thread controller.') + status_logger = PeriodicLogger(seconds=60) + with self._rendezvous.catch_errors(source='outfeed', session=session): + for count, steps in enumerate(queue_ctx.read_iteration_counts()): + step_counter = 0 + for i in xrange(steps): + tf.compat.v1.logging.debug('Outfeed dequeue for iteration (%d, %d)', + count, i) + if step_counter % self._outfeed_every_n_steps == 0: + session.run(self._dequeue_ops) + step_counter += 1 + status_logger.log('Outfeed finished for iteration (%d, %d)', count, i) + tf.compat.v1.logging.info('Outfeed thread finished, shutting down.') + + def _create_infeed_controller(self, name, target, args): + return _OpQueueContext(name=name, target=target, args=args) + + def _assertCompilationSucceeded(self, result, coord): + proto = tpu_compilation_result.CompilationResultProto() + proto.ParseFromString(result) + if proto.status_error_message: + tf.compat.v1.logging.error('Compilation failed: {}'.format( + proto.status_error_message)) + coord.request_stop() + else: + tf.compat.v1.logging.info('Compilation succeeded') + + def after_create_session(self, session, coord): + if self._should_initialize_tpu: + tf.compat.v1.logging.info('Init TPU system') + start = time.time() + with tf.Graph().as_default(): + with tf.compat.v1.Session( + self._master, config=self._session_config) as sess: + sess.run( + tf.compat.v1.tpu.initialize_system( + job=self._master_job, + embedding_config=self._embedding_layer_config)) + tf.compat.v1.logging.info('Initialized TPU in %d seconds', + time.time() - start) + + session.run( + self._init_ops, + options=tf.compat.v1.RunOptions(timeout_in_ms=30 * 60 * 1000)) + + if os.environ.get('TPU_SPLIT_COMPILE_AND_EXECUTE', '') == '1': + tf.compat.v1.logging.info( + 'Compiling user program: this may take a while...') + self._assertCompilationSucceeded(session.run(self._tpu_compile_op), coord) + + self._infeed_controller = self._create_infeed_controller( + name='InfeedController', target=self._run_infeed, args=(session,)) + + self._outfeed_controller = _OpQueueContext( + name='OutfeedController', target=self._run_outfeed, args=(session,)) + + # Enable the worker watchdog to terminate workers on coordinator exit. 
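+    # The watchdog is opt-in: it only starts when the TF_TPU_WATCHDOG_TIMEOUT
+    # environment variable is set to a positive value.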
+ watchdog_timeout = int(os.environ.get('TF_TPU_WATCHDOG_TIMEOUT', '0')) + if watchdog_timeout > 0: + session_support.start_worker_watchdog( + session, shutdown_timeout=watchdog_timeout) + + def before_run(self, run_context): + iterations = run_context.session.run(self._iterations_per_loop_var) + + tf.compat.v1.logging.info('Enqueue next (%d) batch(es) of data to infeed.', + iterations) + self._infeed_controller.send_next_batch_signal(iterations) + + tf.compat.v1.logging.info( + 'Dequeue next (%d) batch(es) of data from outfeed.', iterations) + self._outfeed_controller.send_next_batch_signal(iterations) + + def end(self, session): + tf.compat.v1.logging.info('Stop infeed thread controller') + self._infeed_controller.join() + self._rendezvous.record_done('infeed') + + tf.compat.v1.logging.info('Stop output thread controller') + self._outfeed_controller.join() + self._rendezvous.record_done('outfeed') + + tf.compat.v1.logging.info('Shutdown TPU system.') + session.run(self._finalize_ops) + + +class TPUInfeedOutfeedSessionHookForPrediction(TPUInfeedOutfeedSessionHook): + + def __init__(self, + ctx, + enqueue_ops, + dequeue_ops, + tpu_compile_op, + rendezvous=None, + master=None, + session_config=None): + super(TPUInfeedOutfeedSessionHookForPrediction, self).__init__( + ctx, + enqueue_ops, + dequeue_ops, + tpu_compile_op=tpu_compile_op, + run_infeed_loop_on_coordinator=False, + rendezvous=rendezvous, + master=master, + session_config=session_config) + + def _create_infeed_controller(self, name, target, args): + return _OpSignalOnceQueueContext(name=name, target=target, args=args) + + +class _TPUStopAtStepHook(tf.compat.v1.train.SessionRunHook): + """Hook that requests stop at a specified step. + + This hook is similar to the `session_run_hook._StopAfterNEvalsHook` with + following differences for TPU training: + + 1. This hook sets the variable for `iterations_per_loop`, which is used by + `TPUInfeedOutfeedSessionHook` to control the iterations for infeed/outfeed. + If the `iterations_per_loop` value is specified as time in seconds, the + number of iterations per `Session.run` will be estimated automatically + based on per iteration runtime. + + As the hook execution order is not guaranteed, the variable update is + handled in `after_create_session` and `after_run` as + `TPUInfeedOutfeedSessionHook` reads the variable value in `before_run`. + + 2. For each training loop (session.run), the global step could be increased + multiple times on TPU. The global step tensor value will be explicitly read + again in `after_run` to ensure the latest value is retrieved to avoid race + condition. + """ + + def __init__(self, + iterations_per_loop_counter, + num_steps=None, + final_step=None): + """Initializes a `TPUStopAtStepHook`. + + Args: + iterations_per_loop_counter: A namedtuple of [`value',`unit`] that + represents the number of 'iterations count' or 'time in seconds' to run + optimizer per loop, based on the `unit` specified, `count` or `seconds` + respectively. + num_steps: Number of steps to execute. + final_step: Step after which to stop. + + Raises: + ValueError: If one of the arguments is invalid. 
+ """ + if num_steps is None and final_step is None: + raise ValueError('One of `num_steps` or `final_step` must be specified.') + if num_steps is not None and final_step is not None: + raise ValueError( + 'Only one of `num_steps` or `final_step` can be specified.') + self._iterations_per_loop_counter = iterations_per_loop_counter + if self._iterations_per_loop_counter.unit not in ['seconds', 'count']: + raise ValueError('Only `count` or `seconds` are accepted as the ' + '`iterations_per_loop_counter.unit') + self._num_steps = num_steps + self._final_step = final_step + self._next_iteration_count = 1 + self._iteration_count_estimator = None + if self._iterations_per_loop_counter.unit == 'seconds': + self._iteration_count_estimator = ( + iteration_count_estimator.IterationCountEstimator()) + self._start_time = time.time() + + def _next_iterations(self, global_step, final_step): + """Computes the next iterations count. + + The next iterations count is computed by choosing the smaller of the + remaining step count (`final_step` - `global_step`) and the estimated + iterations count returned by the estimator. + + Args: + global_step: The current step. + final_step: Step after which to stop. + + Returns: + The number of iterations count to run per loop. + """ + remaining_steps = final_step - global_step + + if self._iteration_count_estimator is not None: + estimated_iterations = self._iteration_count_estimator.get( + self._iterations_per_loop_counter.value) + else: + estimated_iterations = self._iterations_per_loop_counter.value + + self._next_iteration_count = min(remaining_steps, estimated_iterations) + return self._next_iteration_count + + def begin(self): + """Initializes variables. + + Initializes the global step and iterations per loop variables. + + Raises: + RuntimeError: An error occurred if global step variable does not exist. + """ + self._global_step_tensor = tf.compat.v1.train.get_global_step() + if self._global_step_tensor is None: + raise RuntimeError('Global step should be created.') + + self._iterations_per_loop_var = _create_or_get_iterations_per_loop() + + def after_create_session(self, session, coord): + """Computes and updates the first time iterations count. + + The iterations are computed by choosing the smaller of the (`final step` - + `global step`), and the initial estimated iterations returned by the + estimator (by default is 1). + + Args: + session: A TensorFlow Session that has been created. + coord: A Coordinator object which keeps track of all threads. + """ + global_step = session.run(self._global_step_tensor) + if self._final_step is None: + self._final_step = global_step + self._num_steps + + iterations = self._next_iterations(global_step, self._final_step) + self._iterations_per_loop_var.load(iterations, session=session) + + def before_run(self, run_context): + """Reset the timer.""" + if self._iteration_count_estimator is not None: + self._start_time = time.time() + + def after_run(self, run_context, run_values): + """Computes the next iterations per loop value or terminates. + + Computes the elapsed time to run the last optimizer loop and if the + `IterationCountEstimator` is used, records the elapsed time and iterations + count. If the final step count has been reached, terminates. Otherwise, + computes and updates the number of iterations to run the optimizer per loop. + + Args: + run_context: A `SessionRunContext` object. + run_values: A SessionRunValues object. 
+ """ + if self._iteration_count_estimator is not None: + elapsed_time = time.time() - self._start_time + tf.compat.v1.logging.info('ElapsedTime: %.3f', elapsed_time) + self._iteration_count_estimator.update(elapsed_time, + self._next_iteration_count) + + # Global step cannot be retrieved via SessionRunArgs and before_run due to + # race condition. + global_step = run_context.session.run(self._global_step_tensor) + if global_step >= self._final_step: + run_context.request_stop() + else: + iterations = self._next_iterations(global_step, self._final_step) + self._iterations_per_loop_var.load( + iterations, session=run_context.session) + + +class _SetEvalIterationsHook(tf.compat.v1.train.SessionRunHook): + """Hook that requests stop at a specified step.""" + + def __init__(self, num_steps): + """Initializes a `_SetEvalIterationsHook`. + + Args: + num_steps: Number of steps to execute. + """ + self._num_steps = num_steps + + def begin(self): + self._iterations_per_loop_var = _create_or_get_iterations_per_loop() + + def after_create_session(self, session, coord): + self._iterations_per_loop_var.load(self._num_steps, session=session) + + +class _StoppingPredictHook(tf.compat.v1.train.SessionRunHook): + """Hook that requests stop according to the stopping signal in prediction.""" + + def __init__(self, scalar_stopping_signal): + self._scalar_stopping_signal = scalar_stopping_signal + + def begin(self): + self._iterations_per_loop_var = _create_or_get_iterations_per_loop() + + def after_create_session(self, session, coord): + # This is not necessary as we do not run infeed enqueue and outfeed dequeue + # in side threads for prediction model. But it makes the + # TPUInfeedOutfeedSessionHook prints nice message. + self._iterations_per_loop_var.load(1, session=session) + + def before_run(self, run_context): + return tf.compat.v1.train.SessionRunArgs(self._scalar_stopping_signal) + + def after_run(self, run_context, run_values): + _ = run_context + scalar_stopping_signal = run_values.results + if _StopSignals.should_stop(scalar_stopping_signal): + # NOTE(xiejw): In prediction, stopping signals are inserted for each + # batch. And we append one more batch to signal the system it should stop. + # The data flow might look like + # + # batch 0: images, labels, stop = 0 (user provided) + # batch 1: images, labels, stop = 0 (user provided) + # ... + # batch 99: images, labels, stop = 0 (user provided) + # batch 100: images, labels, stop = 1 (TPUEstimator appended) + # + # where the final batch (id = 100) is appended by TPUEstimator, so we + # should drop it before returning the predictions to user. + # To achieve that, we throw the OutOfRangeError in after_run. 
Once + # Monitored Session sees this error in SessionRunHook.after_run, the + # "current" prediction, i.e., batch with id=100, will be discarded + # immediately + raise tf.errors.OutOfRangeError(None, None, 'Stopped by stopping signal.') + + +def generate_per_core_enqueue_ops_fn_for_host(ctx, input_fn, + inputs_structure_recorder, + host_device, host_id): + """Generates infeed enqueue ops for per-core input_fn on a single host.""" + captured_infeed_queue = _CapturedObject() + tpu_ordinal_function_impl = ctx.tpu_ordinal_function(host_id) + + def enqueue_ops_fn(): + """A fn returns enqueue_ops.""" + num_cores_per_host = ctx.num_of_cores_per_host + per_host_sharded_inputs = [] + for core_ordinal in range(num_cores_per_host): + with ops.name_scope('ordinal_%d' % (core_ordinal)): + user_context = tpu_context.TPUContext( + internal_ctx=ctx, + input_device=host_device, + invocation_index=host_id * ctx.num_of_cores_per_host + core_ordinal, + host_id=host_id) + inputs = _Inputs.from_input_fn(input_fn(user_context)) + if inputs.is_dataset: + raise TypeError( + '`input_fn` returning `Dataset` is not yet supported in ' + 'per-Core input pipeline deployment yet. Please set ' + 'TPUConfig.per_host_input_for_training to True or return ' + '`features` and `labels` from `input_fn`') + features, labels = inputs.features_and_labels() + + inputs_structure_recorder.validate_and_record_structure( + features, labels) + flattened_inputs = ( + inputs_structure_recorder.flatten_features_and_labels( + features, labels)) + per_host_sharded_inputs.append(flattened_inputs) + + infeed_queue = tpu_feed.InfeedQueue( + number_of_tuple_elements=len(per_host_sharded_inputs[0])) + captured_infeed_queue.capture(infeed_queue) + + per_host_enqueue_ops = infeed_queue.generate_enqueue_ops( + per_host_sharded_inputs, tpu_ordinal_function=tpu_ordinal_function_impl) + return per_host_enqueue_ops + + return enqueue_ops_fn, captured_infeed_queue + + +def generate_per_host_enqueue_ops_fn_for_host(ctx, input_fn, + inputs_structure_recorder, + batch_axis, device, host_id): + """Generates infeed enqueue ops for per-host input_fn on a single host.""" + captured_infeed_queue = _CapturedObject() + + dataset_initializer = None + + with tf.compat.v1.device(device): + user_context = tpu_context.TPUContext( + internal_ctx=ctx, + input_device=device, + invocation_index=host_id, + host_id=host_id) + inputs = _Inputs.from_input_fn(input_fn(user_context)) + + is_dataset = inputs.is_dataset + if ctx.mode == model_fn_lib.ModeKeys.PREDICT: + if not is_dataset: + raise TypeError( + 'For mode PREDICT, `input_fn` must return `Dataset` instead of ' + '`features` and `labels`.') + if batch_axis is not None: + raise TypeError('For mode PREDICT, batch_axis is not supported yet.') + inputs = _InputsWithStoppingSignals( + dataset=inputs.dataset, + batch_size=ctx.batch_size_for_input_fn, + add_padding=True) + + if is_dataset: + dataset_initializer = inputs.dataset_initializer() + + tpu_ordinal_function_impl = ctx.tpu_ordinal_function(host_id) + + def enqueue_ops_fn(): + """A Fn returning the TPU infeed enqueue ops. + + By providing as a Fn, it can be invoked inside the tf.while_loop such that + the input pipeline for multiple iterations can be executed by one + Session.run call. + + Returns: + list of dict of ops. + """ + with tf.compat.v1.device(device): + num_of_replicas_per_host = ctx.num_of_replicas_per_host + # Convert user input to features and labels. 
If the user returns a + # dataset, it is initialized and the features and labels extracted via + # `dataset.iterator.get_next()` + features, labels = inputs.features_and_labels() + signals = inputs.signals() + + features, labels, enqueue_datas_list = ( + _tpu_estimator_embedding.split_inputs( + ctx, + features, + labels, + num_cores_per_batch=num_of_replicas_per_host)) + + inputs_structure_recorder.validate_and_record_structure(features, labels) + unsharded_tensor_list = ( + inputs_structure_recorder.flatten_features_and_labels( + features, labels, signals)) + + infeed_queue = tpu_feed.InfeedQueue( + tuple_types=[t.dtype for t in unsharded_tensor_list], + tuple_shapes=[t.shape for t in unsharded_tensor_list], + shard_dimensions=batch_axis) + captured_infeed_queue.capture(infeed_queue) + infeed_queue.set_number_of_shards(num_of_replicas_per_host) + per_host_enqueue_ops = ( + infeed_queue.split_inputs_and_generate_enqueue_ops( + unsharded_tensor_list, + placement_function=lambda x: device, + tpu_ordinal_function=tpu_ordinal_function_impl)) + + if ctx.embedding_config: + per_host_enqueue_ops.extend( + ctx.embedding_config.tpu_embedding.generate_enqueue_ops( + enqueue_datas_list)) + + if signals is None: + return per_host_enqueue_ops + else: + return { + 'ops': per_host_enqueue_ops, + 'signals': signals, + } + + return enqueue_ops_fn, captured_infeed_queue, dataset_initializer + + +def generate_per_host_v2_enqueue_ops_fn_for_host(ctx, input_fn, + inputs_structure_recorder, + device, host_id, + invocation_index): + """Generates infeed enqueue ops for per-host input_fn on a single host.""" + captured_infeed_queue = _CapturedObject() + dataset_initializer = None + + with tf.compat.v1.device(device): + user_context = tpu_context.TPUContext( + internal_ctx=ctx, + input_device=device, + invocation_index=invocation_index, + host_id=host_id) + inputs = _Inputs.from_input_fn(input_fn(user_context)) + + is_dataset = inputs.is_dataset + if not is_dataset: + raise TypeError('`input_fn` must return a `Dataset` for the PER_HOST_V2 ' + 'input pipeline configuration.') + + # Be aware that when num_cores_per_replica > num_cores_per_host, + # ctx.num_of_replicas_per_host is 0. + if ctx.mode == model_fn_lib.ModeKeys.PREDICT: + inputs = _InputsWithStoppingSignals( + dataset=inputs.dataset, + batch_size=ctx.batch_size_for_input_fn, + add_padding=True, + num_invocations_per_step=max(1, ctx.num_of_replicas_per_host)) + + dataset_initializer = inputs.dataset_initializer() + + tpu_ordinal_function_impl = ctx.tpu_ordinal_function(host_id) + + def device_function_impl(shard_id): + if ctx.device_assignment is not None: + # Find the replica_id of the host's logical core 0. + # The current host_id is guaranteed to contain the logical core 0, + # even when num_cores_per_replica > num_cores_per_host -- the function + # caller makes sure that this host_id will must be receiving data (calls + # input_fn). + replica_id = ctx.device_assignment.lookup_replicas( + task_id=host_id, logical_core=0)[shard_id] + return ctx.tpu_host_placement_function(replica_id=replica_id) + else: + return None + + def enqueue_ops_fn(): + """Generates the per_host enqueue ops.""" + control_deps = [] + per_host_sharded_inputs = [] + enqueue_datas_list = [] + # Be aware that when num_cores_per_replica > num_cores_per_host, + # ctx.num_of_replicas_per_host is 0. 
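+      # Illustrative (hypothetical topology): with two 8-core hosts and
+      # num_cores_per_replica=16, one replica spans both hosts, the context
+      # reports num_of_replicas_per_host == 0, and max(1, 0) below still runs
+      # the loop body once so the host owning the replica's logical core 0
+      # enqueues a single shard of data.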
+ num_replicas_per_host = max(1, ctx.num_of_replicas_per_host) + cached_signals = None + with tf.compat.v1.device(device): + if not inputs.is_dataset: + raise TypeError('`input_fn` must return a `Dataset` for this mode.') + for host in range(num_replicas_per_host): + # Use control dependencies to ensure a deterministic ordering. + if ctx.allow_per_host_v2_parallel_get_next: + features, labels = inputs.features_and_labels() # Calls get_next() + with tf.control_dependencies(control_deps): + if not ctx.allow_per_host_v2_parallel_get_next: + features, labels = inputs.features_and_labels() # Calls get_next() + signals = inputs.signals() + + # All the replicas share the replica 0's stopping signal. + # This avoids inconsistent state among different model replcias. + if cached_signals: + signals['stopping'] = cached_signals['stopping'] + else: + cached_signals = signals + + features, labels, enqueue_data = ( + _tpu_estimator_embedding.split_inputs(ctx, features, labels)) + if len(enqueue_data) != 1: + raise RuntimeError(('Missing or extra enqueue_data for host {}. ' + 'len(enqueue_data) = {}.').format( + host, len(enqueue_data))) + enqueue_datas_list.append(enqueue_data[0]) + + inputs_structure_recorder.validate_and_record_structure( + features, labels) + flattened_inputs = ( + inputs_structure_recorder.flatten_features_and_labels( + features, labels, signals)) + control_deps.extend(flattened_inputs) + per_host_sharded_inputs.append(flattened_inputs) + + if inputs_structure_recorder.flattened_input_dims: + input_partition_dims = inputs_structure_recorder.flattened_input_dims + if signals: + input_partition_dims += [None] * len(signals) + # pylint: disable=protected-access + infeed_queue = tpu_feed._PartitionedInfeedQueue( + number_of_tuple_elements=len(per_host_sharded_inputs[0]), + host_id=host_id, + input_partition_dims=input_partition_dims, + device_assignment=ctx.device_assignment) + per_host_enqueue_ops = infeed_queue.generate_enqueue_ops( + per_host_sharded_inputs) + else: + infeed_queue = tpu_feed.InfeedQueue( + number_of_tuple_elements=len(per_host_sharded_inputs[0])) + per_host_enqueue_ops = infeed_queue.generate_enqueue_ops( + per_host_sharded_inputs, + tpu_ordinal_function=tpu_ordinal_function_impl, + placement_function=device_function_impl) + + captured_infeed_queue.capture(infeed_queue) + + if ctx.embedding_config: + per_host_enqueue_ops.extend( + ctx.embedding_config.tpu_embedding.generate_enqueue_ops( + enqueue_datas_list)) + + if signals is None: + return per_host_enqueue_ops + else: + return { + 'ops': per_host_enqueue_ops, + 'signals': signals, + } + + return enqueue_ops_fn, captured_infeed_queue, dataset_initializer + + +def generate_broadcast_enqueue_ops_fn(ctx, input_fn, inputs_structure_recorder, + num_hosts): + """Generates infeed enqueue ops for one input_fn on all the hosts.""" + captured_infeed_queue = _CapturedObject() + dataset_initializer = None + device_0 = ctx.tpu_host_placement_function(host_id=0) + with tf.compat.v1.device(device_0): + user_context = tpu_context.TPUContext( + internal_ctx=ctx, input_device=device_0, invocation_index=0, host_id=0) + inputs = _Inputs.from_input_fn(input_fn(user_context)) + + is_dataset = inputs.is_dataset + if ctx.mode == model_fn_lib.ModeKeys.PREDICT: + if not is_dataset: + raise TypeError( + 'For mode PREDICT, `input_fn` must return `Dataset` instead of ' + '`features` and `labels`.') + + inputs = _InputsWithStoppingSignals( + dataset=inputs.dataset, + batch_size=ctx.batch_size_for_input_fn, + add_padding=True) + + if is_dataset: 
+ dataset_initializer = inputs.dataset_initializer() + num_replicas_per_host = ctx.num_of_replicas_per_host + + def tpu_ordinal_function_impl(shard_id): + if ctx.device_assignment: + return ctx.device_assignment.tpu_ordinal(replica=shard_id) + else: + return shard_id % num_replicas_per_host + + def device_function_impl(shard_id): + # shard_id ranges from 0 to num_of_replicas_per_host - 1. + # A shard is a replica inside a host. + # In broadcast mode (generate_broadcast_enqueue_ops_fn), the enqueue ops + # are always executed on the first host. Thus shard_id equals to replica_id. + return ctx.tpu_host_placement_function(replica_id=shard_id) + + def enqueue_ops_fn(): + """Generates enqueue ops for all the hosts.""" + broadcasted_inputs = [] + flattened_inputs = None # Cache result from input_fn. + signals = None + num_replicas = ctx.num_replicas + core_id = 0 + for host_id in xrange(num_hosts): + with tf.compat.v1.device( + ctx.tpu_host_placement_function(host_id=host_id)): + for _ in xrange(ctx.num_of_replicas_per_host): + # Note: input_fn is only called once at host 0 for the first replica. + # The features and labels returned from that invocation are + # broadcasted to other replicas(including the replicas on other + # hosts). + if flattened_inputs is None: + features, labels = inputs.features_and_labels() # Calls get_next() + signals = inputs.signals() + + inputs_structure_recorder.validate_and_record_structure( + features, labels) + flattened_inputs = ( + inputs_structure_recorder.flatten_features_and_labels( + features, labels, signals)) + if (ctx.config.tpu_config.eval_training_input_configuration is + tpu_config.InputPipelineConfig.SLICED): + input_slices = [ + tf.split(x, num_replicas) for x in flattened_inputs + ] + if (ctx.config.tpu_config.eval_training_input_configuration is + tpu_config.InputPipelineConfig.SLICED): + # for each core, slice out the flattened_inputs for each core. + broadcasted_inputs.append([x[core_id] for x in input_slices]) + core_id += 1 + else: + broadcasted_inputs.append(flattened_inputs) + + infeed_queue = tpu_feed.InfeedQueue( + number_of_tuple_elements=len(broadcasted_inputs[0])) + captured_infeed_queue.capture(infeed_queue) + enqueue_ops = infeed_queue.generate_enqueue_ops( + broadcasted_inputs, + tpu_ordinal_function=tpu_ordinal_function_impl, + placement_function=device_function_impl) + + if signals is None: + return enqueue_ops + else: + return { + 'ops': enqueue_ops, + 'signals': signals, + } + + return enqueue_ops_fn, captured_infeed_queue, dataset_initializer + + +class TensorPacker(object): + """Pack and unpack small tensors into a big one for efficiency.""" + + def __init__(self, small_feature_dim_size, + minimum_num_small_features_to_group): + self._small_feature_dim_size = small_feature_dim_size + self._minimum_num_small_features_to_group = ( + minimum_num_small_features_to_group) + + def maybe_concatenate_features(self, features): + """If there are enough small tensors, concat them for performance.""" + self._small_feature_names = {} + self._small_feature_sizes = {} + feature_names = _extract_key_names(features) + if feature_names: # Not a single tensor. + # First pass: see if it is worth concatenating the small features. + for name in feature_names: + tensor = features[name] + # We do not handle nested inputs here. 
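+        # A non-Tensor feature value (e.g. a SparseTensor or a nested
+        # structure) aborts packing for the whole feature dict. When packing
+        # does apply the effect is, illustratively: several float32 features
+        # of shape [batch, 1] are concatenated into one [batch, k] tensor
+        # under a reserved per-dtype key, and maybe_split_features() restores
+        # the originals after dequeue.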
+ if not isinstance(tensor, tf.Tensor): + return + shape = tensor.get_shape().as_list() + dtype = tensor.dtype + if (len(shape) == 2 and shape[1] is not None and + shape[1] <= self._small_feature_dim_size): + tf.compat.v1.logging.log_first_n( + tf.compat.v1.logging.INFO, + 'Found small feature: %s %s', 1, name, shape) + if tensor.dtype not in self._small_feature_names: + self._small_feature_names[dtype] = [] + self._small_feature_sizes[dtype] = [] + self._small_feature_names[dtype].append(name) + self._small_feature_sizes[dtype].append(shape[1]) + + dtypes_ = list(self._small_feature_names.keys()) + for dtype in dtypes_: + # If we could find 5 (or more) [batch_size, 1] dense features, + # we will group them. + if (len(self._small_feature_names[dtype]) < + self._minimum_num_small_features_to_group): + self._small_feature_names.pop(dtype) # reset + self._small_feature_sizes.pop(dtype) # reset + + # Second pass: separate small features out + small_feature_tensors = {} + for dtype in self._small_feature_names: + small_feature_tensors[dtype] = [] + for name in self._small_feature_names[dtype]: + small_feature_tensors[dtype].append(features.pop(name)) + + # Add the concat Tensor to features with a special key. + for dtype in self._small_feature_names: + key = self._get_small_feature_key(dtype) + if key in features: + raise ValueError('{} is reserved as feature key for concatenated' + 'small features.') + features[key] = (tf.concat(small_feature_tensors[dtype], axis=1)) + + def maybe_split_features(self, maybe_concatenated_features): + for dtype in self._small_feature_names: + key = self._get_small_feature_key(dtype) + concatenated_small_features = maybe_concatenated_features.pop(key) + splits = tf.split( + concatenated_small_features, self._small_feature_sizes[dtype], axis=1) + for name, split in zip(self._small_feature_names[dtype], splits): + maybe_concatenated_features[name] = split + + def _get_small_feature_key(self, dtype): + return _TENSOR_PACKER_CONCATENATED_SMALL_FEATURES_KEY + '_' + str(dtype) + + +class _InputPipeline(object): + """`_InputPipeline` handles invoking `input_fn` and piping to infeed queue. + + `_InputPipeline` abstracts the per-core/per-host `input_fn` invocation from + call site. To be precise, based on the configuration in + `_InternalTPUContext`, it invokes `input_fn` for all cores (usually + multi-host TPU training) or for one host (usually for single-host TPU + evaluation), and sends all `features` and `labels` returned by `input_fn` to + TPU infeed. For per-core invocation, `features` and `labels` are piped to + infeed directly, one tuple for each core. For per-host invocation, `features` + and `labels` are split at host (with respect to `batch_axis`) and piped to all + cores accordingly. + + In addition, flatten/unflatten are handled by `_InputPipeline` also. Model + inputs returned by the `input_fn` can have one of the following forms: + 1. features + 2. (features, labels) + 3. ((arbitrarily nested structure of features), labels) + + Internally, form 1 is reformed to `(features, None)` as features and labels + are passed separately to underlying methods. For TPU training, TPUEstimator + may expect multiple `features` and `labels` tuples one for each core. + + TPUEstimator allows various different structures for inputs (namely `features` + and `labels`). Both `features` and `labels` can be any nested sturcture + supported by TF nest (namely, dict, tuples, namedtuples or any nested + structure of such of Tensors). `labels` could be `None` as well. 
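+  For example, all of the following `input_fn` return values are accepted:
+
+    features                              # form 1
+    (features, labels)                    # form 2
+    ({'x': x, 'meta': (a, b)}, labels)    # form 3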
+ + These are flattened before they are passed to the infeed/outfeed library + as that expectes flattend lists. + """ + + class InputsStructureRecorder(object): + """The recorder to record inputs structure.""" + + def __init__(self, input_partition_dims=None): + # Holds the structure of inputs + self._feature_structure = {} + self._flattened_input_dims = None + + if input_partition_dims: + # This should have been validated in TPUConfig. + assert len(input_partition_dims) <= 2, 'must have 1 or 2 elements.' + if len(input_partition_dims) == 2: + self._feature_dims, self._label_dims = input_partition_dims + else: + self._feature_dims = input_partition_dims[0] + self._label_dims = None + + assert self._feature_dims is not None, ('input_partition_dims[0] must ' + 'not be None') + else: + self._feature_dims = None + self._label_dims = None + + # Internal state. + self._initialized = False + + @property + def flattened_input_dims(self): + assert self._initialized, 'InputsStructureRecorder is not initialized.' + return self._flattened_input_dims + + def has_labels(self): + return 'labels' in self._feature_structure + + def _flatten_input_dims(self, features, labels, feature_dims, label_dims): + """Flatten input dims with the same order as flattened input tensors.""" + + try: + flattened_input_dims = data_nest.flatten_up_to(features, feature_dims) + except TypeError as e: + raise ValueError( + 'TPUConfig.input_partition_dims[0] mismatched the structure of' + ' features. input_partition_dims[0]: {}, features {}. {}'.format( + feature_dims, features, e)) + + if labels is not None: + if label_dims is not None: + try: + flattened_input_dims.extend( + data_nest.flatten_up_to(labels, self._label_dims)) + except TypeError as e: + raise ValueError( + 'TPUConfig.input_partition_dims[1] mismatched the structure of' + ' labels. input_partition_dims[1]: {}, labels: {}. {}'.format( + label_dims, labels, e)) + else: + num_label_tensors = len(data_nest.flatten(labels)) + flattened_input_dims.extend([None] * num_label_tensors) + return flattened_input_dims + + def validate_and_record_structure(self, features, labels): + """Validates and records the structure of `features` and `labels`.""" + # Extract structure. + feature_names = _extract_key_names(features) + label_names = _extract_key_names(labels) + + if not self._initialized: + # Record structure. + self._initialized = True + if self._feature_dims is not None: + feature_dims_names = _extract_key_names(self._feature_dims) + if feature_dims_names != feature_names: + raise ValueError( + 'TPUConfig.input_partition_dims[0] mismatched feature' + ' keys. Expected {}, got {}'.format(feature_names, + feature_dims_names)) + label_dims_names = _extract_key_names(self._label_dims) + if self._label_dims is not None and label_dims_names != label_names: + raise ValueError( + 'TPUConfig.input_partition_dims[1] mismatched label' + ' keys. 
Expected {}, got {}'.format(label_names, + label_dims_names)) + self._flattened_input_dims = self._flatten_input_dims( + features, labels, self._feature_dims, self._label_dims) + + def flatten_features_and_labels(self, features, labels, signals=None): + """Flattens the `features` and `labels` to a single tensor list.""" + self.tensor_packer = TensorPacker( + _TENSOR_PACKER_SMALL_FEATURE_DIM_SIZE, + _TENSOR_PACKER_MINIMUM_NUM_SMALL_FEATURES_TO_GROUP) + self.tensor_packer.maybe_concatenate_features(features) + self._feature_structure['features'] = features + if labels is not None: + self._feature_structure['labels'] = labels + if signals is not None: + self._feature_structure['signals'] = signals + return data_nest.flatten(self._feature_structure) + + def unflatten_features_and_labels(self, flattened_inputs): + """Restores the flattened inputs to original features and labels form. + + Args: + flattened_inputs: Flattened inputs for each shard. + + Returns: + A tuple of (`features`, `labels`), where `labels` could be None. + Each one, if present, should have identical structure (single tensor vs + dict) as the one returned by input_fn. + + Raises: + ValueError: If the number of expected tensors from `flattened_inputs` + mismatches the recorded structure. + """ + + unflattened_inputs = data_nest.pack_sequence_as(self._feature_structure, + flattened_inputs) + features = unflattened_inputs['features'] + self.tensor_packer.maybe_split_features(features) + return _Inputs( + features, + unflattened_inputs.get('labels'), + signals=unflattened_inputs.get('signals')) + + def __init__(self, input_fn, batch_axis, ctx): + """Constructor. + + Args: + input_fn: input fn for train or eval. + batch_axis: A python tuple of int values describing how each tensor + produced by the Estimator `input_fn` should be split across the TPU + compute shards. + ctx: A `_InternalTPUContext` instance with mode. + + Raises: + ValueError: If both `sharded_features` and `num_cores` are `None`. + """ + self._inputs_structure_recorder = _InputPipeline.InputsStructureRecorder( + ctx.input_partition_dims) + + self._sharded_per_core = ctx.is_input_sharded_per_core() + self._input_fn = input_fn + self._infeed_queue = None + self._ctx = ctx + self._batch_axis = batch_axis + + def generate_infeed_enqueue_ops_and_dequeue_fn(self): + """Generates infeed enqueue ops and dequeue_fn.""" + # While tf.while_loop is called, the body function, which invokes + # `enqueue_fn` passed in, is called to construct the graph. So, input_fn + # structure is recorded. + enqueue_ops, all_hooks, run_infeed_loop_on_coordinator = ( + self._invoke_input_fn_and_record_structure()) + + self._validate_input_pipeline() + + def dequeue_fn(): + """dequeue_fn is used by TPU to retrieve the tensors.""" + # In the model-parallel case, both the host-side and device-side + # computations must agree on the core on which infeed takes place. We + # choose to perform infeed on logical core 0 of each replica. + values = self._infeed_queue.generate_dequeue_op(tpu_device=0) + # The unflatten process uses the structure information recorded above. 
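+      # Illustrative round trip for a hypothetical feature dict:
+      #   ({'x': x, 'y': y}, labels)  --flatten-->   [x, y, labels]
+      #   [x, y, labels]  --unflatten--> ({'x': x, 'y': y}, labels)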
+ return self._inputs_structure_recorder.unflatten_features_and_labels( + values) + + return (enqueue_ops, dequeue_fn, all_hooks, run_infeed_loop_on_coordinator) + + def _invoke_input_fn_and_record_structure(self): + """Deploys the input pipeline and record input structure.""" + enqueue_ops = [] + infeed_queues = [] + all_dataset_initializers = [] + num_hosts = self._ctx.num_hosts + tpu_host_placement_fn = self._ctx.tpu_host_placement_function + + run_infeed_loop_on_coordinator = True + + if self._sharded_per_core: + # Per-Core input pipeline deployment. + # Invoke input pipeline for each core and placed on the corresponding + # host. + for host_id in range(num_hosts): + host_device = tpu_host_placement_fn(host_id=host_id) + with tf.compat.v1.device(host_device): + with ops.name_scope('input_pipeline_task%d' % (host_id)): + enqueue_ops_fn, captured_infeed_queue = ( + generate_per_core_enqueue_ops_fn_for_host( + self._ctx, self._input_fn, self._inputs_structure_recorder, + host_device, host_id)) + + if _WRAP_INPUT_FN_INTO_WHILE_LOOP: + run_infeed_loop_on_coordinator = False + enqueue_ops.append( + _wrap_computation_in_while_loop( + device=host_device, op_fn=enqueue_ops_fn)) + else: + enqueue_ops.append(enqueue_ops_fn()) + # Infeed_queue_getter must be called after enqueue_ops_fn is called. + infeed_queues.append(captured_infeed_queue.get()) + + elif self._ctx.is_input_broadcast_with_iterators(): + # Only calls input_fn in host 0. + host_device = tpu_host_placement_fn(host_id=0) + enqueue_ops_fn, captured_infeed_queue, dataset_initializer = ( + generate_broadcast_enqueue_ops_fn(self._ctx, self._input_fn, + self._inputs_structure_recorder, + num_hosts)) + if dataset_initializer: + all_dataset_initializers.append(dataset_initializer) + run_infeed_loop_on_coordinator = False + wrap_fn = ( + _wrap_computation_in_while_loop + if self._ctx.mode != model_fn_lib.ModeKeys.PREDICT else + _wrap_computation_in_while_loop_with_stopping_signals) + enqueue_ops.append(wrap_fn(device=host_device, op_fn=enqueue_ops_fn)) + else: + enqueue_ops.append(enqueue_ops_fn()) + infeed_queues.append(captured_infeed_queue.get()) + + else: + # This branch handles two senarios: + # num_cores_per_replica > num_cores_per_host + # and num_cores_per_replica <= num_cores_per_host + # First, get the set of host_ids, by iterating replicas. + # We only want and will get the set of *unique* host_ids + # *that will call input_fn*. For each replica, we only call the input_fn + # from the CPU host that contains logical core 0. + + # Use a list here to ensure deterministic order. + host_id_with_invocation_id_pair = [] + + if not self._ctx.is_replica_across_hosts(): + for host_id in range(num_hosts): + invocation_index = host_id + host_id_with_invocation_id_pair.append((host_id, invocation_index)) + else: + for replica_id in xrange(self._ctx.num_replicas): + invocation_index = replica_id + host_device, _ = self._ctx.device_for_replica(replica_id) + # TODO(lehou): Get host_id in a better way. 
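+          # e.g. a host_device of '/job:worker/replica:0/task:3/device:CPU:0'
+          # parses to host_id 3 via the splits on '/task:' and '/device:'.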
+ host_id = int(host_device.split('/task:')[1].split('/device:')[0]) + host_id_with_invocation_id_pair.append((host_id, invocation_index)) + + for (host_id, invocation_index) in host_id_with_invocation_id_pair: + host_device = tpu_host_placement_fn(host_id=host_id) + with tf.compat.v1.device(host_device): + with ops.name_scope('input_pipeline_task%d' % (host_id)): + if self._ctx.is_input_per_host_with_iterators(): + enqueue_ops_fn, captured_infeed_queue, dataset_initializer = ( + generate_per_host_v2_enqueue_ops_fn_for_host( + self._ctx, self._input_fn, + self._inputs_structure_recorder, host_device, host_id, + invocation_index)) + else: + enqueue_ops_fn, captured_infeed_queue, dataset_initializer = ( + generate_per_host_enqueue_ops_fn_for_host( + self._ctx, self._input_fn, + self._inputs_structure_recorder, self._batch_axis, + host_device, host_id)) + + # NOTE(xiejw): We dispatch here based on the return type of the + # users `input_fn`. + # + # 1. If input_fn returns a Dataset instance, we initialize the + # iterator outside of tf.while_loop, and call the iterator.get_next + # inside tf.while_loop. This should be always safe. + # + # 2. If input_fn returns (features, labels), it is too late to wrap + # them inside tf.while_loop, as resource initialization cannot be + # handled in TF control flow properly. In this case, we will use + # python loop to enqueue the data into TPU system. This may be + # slow compared to the previous case. + if dataset_initializer: + all_dataset_initializers.append(dataset_initializer) + run_infeed_loop_on_coordinator = False + wrap_fn = ( + _wrap_computation_in_while_loop + if self._ctx.mode != model_fn_lib.ModeKeys.PREDICT else + _wrap_computation_in_while_loop_with_stopping_signals) + enqueue_ops.append( + wrap_fn(device=host_device, op_fn=enqueue_ops_fn)) + else: + enqueue_ops.append(enqueue_ops_fn()) + infeed_queues.append(captured_infeed_queue.get()) + + # infeed_queue is used to generate dequeue ops. The only thing it uses for + # dequeue is dtypes and types. So, any one can be used. Here, grab the + # first one. + self._infeed_queue = infeed_queues[0] + return enqueue_ops, [ + util_lib.MultiHostDatasetInitializerHook(all_dataset_initializers) + ], run_infeed_loop_on_coordinator + + def _validate_input_pipeline(self): + """Validates the input pipeline. + + Perform some sanity checks to log user friendly information. We should + error out to give users better error message. But, if + _WRAP_INPUT_FN_INTO_WHILE_LOOP is False (legacy behavior), we cannot break + user code, so, log a warning. + + Raises: + RuntimeError: If the validation failed. + """ + if tf.compat.v1.get_default_graph().get_collection( + tf.compat.v1.GraphKeys.QUEUE_RUNNERS): + err_msg = ('Input pipeline contains one or more QueueRunners. ' + 'It could be slow and not scalable. Please consider ' + 'converting your input pipeline to use `tf.data` instead (see ' + 'https://www.tensorflow.org/guide/datasets for ' + 'instructions.') + if _WRAP_INPUT_FN_INTO_WHILE_LOOP: + raise RuntimeError(err_msg) + else: + logging.warn(err_msg) + + +def call_computation(computation_inputs, computation, batch_config=None): + """Call computation. + + Args: + computation_inputs: A tensor or dict of tensors, the inputs to the + computation. + computation: A Python function that takes no inputs and builds computation + graph. If `computation` returns m outputs, this function will return a + list of m Tensors. 
+ batch_config: A BatchConfig named tuple specifying the batching + configuration to use for inference batching. + + Returns: + A list of output tensors. + """ + + # Using `TPUPartitionedCall` makes it possible to target a different + # TPU core with every `Session.run()` call. Note that the entire inference + # graph executes on a single core, and that invocations of this graph + # will round-robin among the cores attached to a host. + def tpu_partitioned_call(partition_inputs): + + # capture_resource_var_by_value enables variables to be mirrored on TPU + # to avoid fetching from CPU, since variables do not change during + # inference. + @function.Defun(capture_resource_var_by_value=False) + def tpu_subgraph(): + return computation(partition_inputs) + + return tpu_functional.TPUPartitionedCall( + args=tpu_subgraph.captured_inputs, + device_ordinal=tpu_ops.tpu_ordinal_selector(), + Tout=[o.type for o in tpu_subgraph.definition.signature.output_arg], + f=tpu_subgraph) + + # Not using Batching Function but use TPUPartitionedCall/all cores. + if not batch_config: + return tpu_partitioned_call(computation_inputs) + + # Use Batching Function and TPUPartitionedCall/all cores. + # Note that BatchingFunction requires a list of tensors and doesn't support + # a dict of tensors. So we preserve the structure by deterministically + # flattening the dict before batching and then recomposing it after batching + # to feed into the computation. + ordered_inputs_list = tf.nest.flatten(computation_inputs) + + @tf.nondifferentiable_batch_function( + num_batch_threads=batch_config.num_batch_threads, + max_batch_size=batch_config.max_batch_size, + batch_timeout_micros=batch_config.batch_timeout_micros, + allowed_batch_sizes=batch_config.allowed_batch_sizes, + max_enqueued_batches=batch_config.max_enqueued_batches, + autograph=False) + def batched_tpu_computation(*tensor_args): + """Recompose the input feature dict and calls the TPU computation.""" + computation_feature_input = tf.nest.pack_sequence_as( + computation_inputs, tensor_args) + return tpu_partitioned_call(computation_feature_input) + + return batched_tpu_computation(*ordered_inputs_list) + + +class _ModelFnWrapper(object): + """A `model_fn` wrapper. + + This makes calling model_fn on CPU and TPU easier and more consistent and + performs necessary check and mutation required by TPU training and evaluation. + + In addition, this wrapper manages converting the `model_fn` to a single TPU + train and eval step. + """ + + def __init__(self, model_fn, config, params, ctx): + self._model_fn = model_fn + self._config = config + self._params = params + self._ctx = ctx + + def call_without_tpu(self, features, labels, is_export_mode): + return self._call_model_fn(features, labels, is_export_mode=is_export_mode) + + def _add_embedding_features(self, features, hook_dummy_table_variables): + """Add embedding features, optionally add hook to intercept gradient.""" + if self._ctx.embedding_config: + tpu_embedding_ = self._ctx.embedding_config.tpu_embedding + embedding_activations = tpu_embedding_.get_activations() + if hook_dummy_table_variables: + new_embedding_activations = ( + tpu_embedding_gradient.hook_dummy_table_variables_to_activations( + tpu_embedding_, embedding_activations, + self._ctx.embedding_config.dummy_table_variables)) + features.update(new_embedding_activations) + else: + features.update(embedding_activations) + + def convert_to_single_tpu_train_step(self, dequeue_fn): + """Converts user provided model_fn` as a single train step on TPU. 
+ + The user provided `model_fn` takes input tuple + (features, labels) and produces the EstimatorSpec with train_op and loss for + train `mode`. This usually represents a single train computation on CPU. + + For TPU training, a train (computation) step is first wrapped in a + tf.while_loop control flow to repeat for many times and then replicated to + all TPU shards. Besides the input should be taken from TPU infeed rather + than input pipeline (input_fn) directly. To fit TPU loop and replicate + pattern, the original train computation should be reformed, which is the + returned `train_step`. + + Args: + dequeue_fn: The function to retrieve inputs, features and labels, from TPU + infeed dequeue channel. + + Returns: + A tuple of train_fn, host_calls, and captured scaffold_fn. The train_fn + representing the train step for TPU. + """ + + host_call = _OutfeedHostCall( + self._ctx, + outfeed_every_n_steps=self._config.tpu_config + .experimental_host_call_every_n_steps) + captured_scaffold_fn = _CapturedObject() + captured_training_hooks = _CapturedObject() + + def train_step(step): + """Training step function for use inside a while loop.""" + inputs = dequeue_fn() + features, labels = inputs.features_and_labels() + self._add_embedding_features(features, True) + + estimator_spec = self._verify_estimator_spec( + self._call_model_fn(features, labels)) + loss, train_op = estimator_spec.loss, estimator_spec.train_op + + if tensor_tracer.TensorTracer.is_enabled(): + tt = tensor_tracer.TensorTracer() + loss = tt.trace_tpu(tf.compat.v1.get_default_graph(), loss, train_op, + self._ctx.num_replicas) + tracer_host_call = tt.host_call_deps_and_fn() + else: + tracer_host_call = {} + + if isinstance(estimator_spec, model_fn_lib._TPUEstimatorSpec): # pylint: disable=protected-access + captured_scaffold_fn.capture(estimator_spec.scaffold_fn) + else: + captured_scaffold_fn.capture(None) + + captured_training_hooks.capture(estimator_spec.training_hooks) + + if self._ctx.embedding_config is None: + apply_sparse_grads = [] + else: + tpu_embedding_ = self._ctx.embedding_config.tpu_embedding + gradients = ( + tpu_embedding_gradient.get_gradients_through_dummy_table_variables( + tpu_embedding_)) + grad_multiplier = self._ctx.embedding_config.get_grad_multiplier() + if grad_multiplier is not None: + scaled_gradients = collections.OrderedDict( + (k, v * grad_multiplier) for k, v in six.iteritems(gradients)) + else: + scaled_gradients = gradients + apply_sparse_grads = [ + tpu_embedding_.generate_send_gradients_op( + scaled_gradients, tf.compat.v1.train.get_global_step()) + ] + + stopping_signals = None + user_provided_stopping_signals_name = None + if self._ctx.feed_hook is not None: + stopping_signals, user_provided_stopping_signals_name = \ + self._ctx.feed_hook.get_stopping_signals_and_name(features) + + # We must run train_op to update the variables prior to running the + # outfeed. 
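+      # The dependency chain built below is: (train_op plus embedding
+      # gradient sends) -> host_call outfeed enqueue -> the returned
+      # tf.identity(loss), so a step's outfeed is only enqueued after that
+      # step's variable update.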
+ with tf.control_dependencies([train_op] + apply_sparse_grads): + host_call_outfeed_ops = [] + host_call_fn, host_call_args = None, [] + + if (isinstance(estimator_spec, model_fn_lib._TPUEstimatorSpec) # pylint: disable=protected-access + and estimator_spec.host_call is not None): + host_call_fn, host_call_args = estimator_spec.host_call + + if stopping_signals is not None: + identity_fn = lambda **kwargs: kwargs + tracer_host_call[user_provided_stopping_signals_name] = [ + identity_fn, stopping_signals + ] + + if host_call_fn: + # Ignore dummy hostcalls (no arguments) + if host_call_args: + tracer_host_call.update({'host_call': estimator_spec.host_call}) + host_call.record(tracer_host_call) + host_call_outfeed_ops = host_call.create_enqueue_op(step) + elif tracer_host_call: + host_call.record(tracer_host_call) + host_call_outfeed_ops = host_call.create_enqueue_op(step) + else: + # Create a host call for the loss to track execution progress + # Without this, we don't have any indication of the state of the + # TPU program. + tracer_host_call.update( + {'host_call': (lambda loss_t: loss_t, [tf.reshape(loss, [1])])}) + host_call.record(tracer_host_call) + host_call_outfeed_ops = host_call.create_enqueue_op(step) + + with tf.control_dependencies(host_call_outfeed_ops): + return tf.identity(loss) + + return (train_step, host_call, captured_scaffold_fn, + captured_training_hooks) + + def convert_to_single_tpu_eval_step(self, dequeue_fn): + """Converts user provided model_fn` as a single eval step on TPU. + + Similar to training, the user provided `model_fn` takes input tuple + (features, labels) and produces the TPUEstimatorSpec with eval_metrics for + eval `mode`. This usually represents a single evaluation computation on CPU. + + For TPU evaluation, a eval (computation) step is first wrapped in a + tf.while_loop control flow to repeat for many times and then replicated to + all TPU shards. Besides the input and output are slightly different. Input, + features and labels, should be taken from TPU infeed rather than input + pipeline (input_fn) directly. Output is managed in two stages. First, the + model outputs as the result of evaluation computation, usually model logits, + should be transferred from TPU system to CPU. Then, all model outputs are + concatenated first on CPU and sent to the metric_fn for metrics computation. + To fit TPU evaluation pattern, the original eval computation should be + reformed, which is the returned `eval_step`. + + Args: + dequeue_fn: The function to retrieve inputs, features and labels, from TPU + infeed dequeue channel. + + Returns: + A tuple of eval_fn, host_calls, and captured scaffold_fn. The eval_fn + representing the eval step for TPU. + """ + host_calls = _OutfeedHostCall(self._ctx) + captured_scaffold_fn = _CapturedObject() + captured_eval_hooks = _CapturedObject() + + def eval_step(total_loss): + """Evaluation step function for use inside a while loop.""" + inputs = dequeue_fn() + features, labels = inputs.features_and_labels() + self._add_embedding_features(features, False) + + tpu_estimator_spec = self._call_model_fn(features, labels) + if not isinstance(tpu_estimator_spec, model_fn_lib._TPUEstimatorSpec): # pylint: disable=protected-access + raise RuntimeError( + 'estimator_spec used by TPU evaluation must have type' + '`TPUEstimatorSpec`. 
Got {}'.format(type(tpu_estimator_spec))) + + loss = tpu_estimator_spec.loss + captured_scaffold_fn.capture(tpu_estimator_spec.scaffold_fn) + captured_eval_hooks.capture(tpu_estimator_spec.evaluation_hooks) + + to_record = {} + if tpu_estimator_spec.eval_metrics: + to_record['eval_metrics'] = tpu_estimator_spec.eval_metrics + if tpu_estimator_spec.host_call is not None: + # We assume that evaluate won't update global step, so we don't wrap + # this host_call. + to_record['host_call'] = tpu_estimator_spec.host_call + host_calls.record(to_record) + + with tf.control_dependencies(host_calls.create_enqueue_op()): + return tf.math.add(total_loss, loss) + + return eval_step, host_calls, captured_scaffold_fn, captured_eval_hooks + + def convert_to_single_tpu_predict_step(self, dequeue_fn): + """Converts user provided model_fn` as a single predict step on TPU. + + Args: + dequeue_fn: The function to retrieve inputs, features and labels, from TPU + infeed dequeue channel. + + Returns: + A tuple of predict_fn, host_calls, and captured scaffold_fn. The + predict_fn representing the predict step for TPU. + """ + host_calls = _OutfeedHostCall(self._ctx) + captured_scaffold_fn = _CapturedObject() + captured_predict_hooks = _CapturedObject() + + def predict_step(unused_scalar_stopping_signal): + """Evaluation step function for use inside a while loop.""" + inputs = dequeue_fn() + features, labels = inputs.features_and_labels() + stopping_signals = inputs.signals() + + assert stopping_signals is not None, ( + 'Internal Error: `signals` is missing.') + + tpu_estimator_spec = self._call_model_fn( + features, labels, is_export_mode=False) + if not isinstance(tpu_estimator_spec, model_fn_lib._TPUEstimatorSpec): # pylint: disable=protected-access + raise RuntimeError( + 'estimator_spec used by TPU prediction must have type' + '`TPUEstimatorSpec`. Got {}'.format(type(tpu_estimator_spec))) + + self._verify_tpu_spec_predictions(tpu_estimator_spec.predictions) + + captured_scaffold_fn.capture(tpu_estimator_spec.scaffold_fn) + captured_predict_hooks.capture(tpu_estimator_spec.prediction_hooks) + to_record = {} + identity_fn = lambda **kwargs: kwargs + to_record['predictions'] = [identity_fn, tpu_estimator_spec.predictions] + to_record['signals'] = [identity_fn, stopping_signals] + if tpu_estimator_spec.host_call is not None: + to_record['host_call'] = tpu_estimator_spec.host_call + host_calls.record(to_record) + + with tf.control_dependencies(host_calls.create_enqueue_op()): + return _StopSignals.as_scalar_stopping_signal(stopping_signals) + + return (predict_step, host_calls, captured_scaffold_fn, + captured_predict_hooks) + + def _verify_tpu_spec_predictions(self, predictions): + """Validates TPUEstimatorSpec.predictions dict.""" + # TODO(xiejw): Adds validation for prediction dictionrary. + # TODO(xiejw): Adds support for single tensor as predictions. + if not isinstance(predictions, dict): + raise TypeError('TPUEstimatorSpec.predictions must be dict of Tensors.') + + for (key, tensor) in predictions.items(): + if tensor.shape.dims[0].value is None: + raise ValueError( + 'The tensor with key ({}) in TPUEstimatorSpec.predictions has ' + 'dynamic shape (should be static). Tensor: {}'.format(key, tensor)) + return predictions + + def _validate_model_features_and_labels(self, features, labels, + is_export_mode): + """Validates that the features and labels for the model function are valid. 
+ + A valid features/labels object is the one with: + - Type: A tensor or any nested structure of tensors supported by TF nest, + namely nested dictionary, tuple, namedtuple, or sequence of tensors. + - Static shape if is_export_mode is False. + + Args: + features: the features that would be input to the model function. + labels: the labels that would be input to the model function. + is_export_mode: boolean value specifying if in export mode. + + Raises: + TypeError: If features/labels are not of the correct type. + ValueError: If features/labels have dynamic shape. + """ + + def validate(obj, obj_name): + """Helper validate function.""" + if is_export_mode or self._ctx.is_running_on_cpu(is_export_mode): + return + if isinstance(obj, tf.Tensor): + if not obj.get_shape().is_fully_defined(): + raise ValueError( + 'The {} to the model returned by input_fn must have static shape.' + ' Tensor: {}'.format(obj_name, obj)) + else: + for tensor in data_nest.flatten(obj): + if not tensor.get_shape().is_fully_defined(): + raise ValueError( + ('The {} to the model returned by input_fn must have static ' + 'shape. Tensor: {}').format(obj_name, tensor)) + + validate(features, 'features') + if labels is not None: + validate(labels, 'labels') + + def _call_model_fn(self, features, labels, is_export_mode=False): + """Calls the model_fn with required parameters.""" + self._validate_model_features_and_labels(features, labels, is_export_mode) + model_fn_args = function_utils.fn_args(self._model_fn) + kwargs = {} + + # Makes deep copy with `config` and params` in case user mutates them. + config = copy.deepcopy(self._config) + params = copy.deepcopy(self._params) + + if 'labels' in model_fn_args: + kwargs['labels'] = labels + elif labels is not None: + raise ValueError( + 'model_fn does not take labels, but input_fn returns labels.') + if 'mode' in model_fn_args: + kwargs['mode'] = self._ctx.mode + if 'config' in model_fn_args: + kwargs['config'] = config + if 'params' in model_fn_args: + kwargs['params'] = params + + if 'params' not in model_fn_args: + raise ValueError('model_fn ({}) does not include params argument, ' + 'required by TPUEstimator to pass batch size as ' + 'params[\'batch_size\']'.format(self._model_fn)) + + if is_export_mode: + batch_size_for_model_fn = None + else: + batch_size_for_model_fn = self._ctx.batch_size_for_model_fn + + if batch_size_for_model_fn is not None: + _add_item_to_params(params, _BATCH_SIZE_KEY, batch_size_for_model_fn) + + running_on_cpu = self._ctx.is_running_on_cpu(is_export_mode) + # In export mode, params['use_tpu'] has already been set based on mode + # (i.e. True for _REWRITE_FOR_INFERENCE_MODE, False otherwise). + if not is_export_mode: + _add_item_to_params(params, _USE_TPU_KEY, not running_on_cpu) + + if not running_on_cpu: + user_context = tpu_context.TPUContext( + internal_ctx=self._ctx, call_from_input_fn=False) + _add_item_to_params(params, _CTX_KEY, user_context) + + estimator_spec = self._model_fn(features=features, **kwargs) + if (running_on_cpu and + isinstance(estimator_spec, model_fn_lib._TPUEstimatorSpec)): # pylint: disable=protected-access + # The estimator_spec will be passed to `Estimator` directly, which expects + # type `EstimatorSpec`. As we are running on the CPU, escape + # the TPUInferenceContext. 
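+      # Sketch of what follows: temporarily swap in the outer control flow
+      # context, convert the spec via as_estimator_spec(), and restore the
+      # original context in the finally block.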
+ graph_context = tf.compat.v1.get_default_graph( + )._get_control_flow_context() + try: + if isinstance(graph_context, tpu._TPUInferenceContext): + tf.compat.v1.get_default_graph()._set_control_flow_context( + graph_context.outer_context) + return estimator_spec.as_estimator_spec() + finally: + tf.compat.v1.get_default_graph()._set_control_flow_context( + graph_context) + else: + return estimator_spec + + def _verify_estimator_spec(self, estimator_spec): + """Validates the estimator_spec.""" + if isinstance(estimator_spec, model_fn_lib._TPUEstimatorSpec): # pylint: disable=protected-access + return estimator_spec + + err_msg = '{} returned by EstimatorSpec is not supported in TPUEstimator.' + if estimator_spec.training_chief_hooks: + raise ValueError( + err_msg.format('training_chief_hooks') + 'If you want' + + ' to pass training hooks, please pass via training_hooks.') + + if estimator_spec.scaffold: + tf.compat.v1.logging.warn( + 'EstimatorSpec.Scaffold is ignored by TPU train/eval. ' + 'Please use TPUEstimatorSpec.') + return estimator_spec + + +class _OutfeedHostCall(object): + """Support for `eval_metrics` and `host_call` in TPUEstimatorSpec.""" + + def __init__(self, ctx, outfeed_every_n_steps=1): + self._ctx = ctx + self._names = [] + # All of these are dictionaries of lists keyed on the name. + self._host_fns = {} + self._tensor_keys = collections.defaultdict(list) + self._tensors = collections.defaultdict(list) + self._tensor_dtypes = collections.defaultdict(list) + self._tensor_shapes = collections.defaultdict(list) + self._outfeed_every_n_steps = outfeed_every_n_steps + + @staticmethod + def validate(host_calls): + """Validates the `eval_metrics` and `host_call` in `TPUEstimatorSpec`.""" + + for name, host_call in host_calls.items(): + if not isinstance(host_call, (tuple, list)): + raise ValueError('{} should be tuple or list'.format(name)) + if len(host_call) != 2: + raise ValueError('{} should have two elements.'.format(name)) + if not callable(host_call[0]): + raise TypeError('{}[0] should be callable.'.format(name)) + if not isinstance(host_call[1], (tuple, list, dict)): + raise ValueError('{}[1] should be tuple or list, or dict.'.format(name)) + + if isinstance(host_call[1], (tuple, list)): + fullargspec = tf_inspect.getfullargspec(host_call[0]) + fn_args = function_utils.fn_args(host_call[0]) + # wrapped_hostcall_with_global_step uses varargs, so we allow that. + if fullargspec.varargs is None and len(host_call[1]) != len(fn_args): + raise RuntimeError( + 'In TPUEstimatorSpec.{}, length of tensors {} does not match ' + 'method args of the function, which takes {}.'.format( + name, len(host_call[1]), len(fn_args))) + + @staticmethod + def create_cpu_hostcall(host_calls): + """Runs on the host_call on CPU instead of TPU when use_tpu=False.""" + + _OutfeedHostCall.validate(host_calls) + ret = {} + for name, host_call in host_calls.items(): + host_fn, tensors = host_call + if isinstance(tensors, (tuple, list)): + ret[name] = host_fn(*tensors) + else: + # Must be dict. + try: + ret[name] = host_fn(**tensors) + except TypeError as e: + tf.compat.v1.logging.warn( + 'Exception while calling %s: %s. 
It is likely the tensors ' + '(%s[1]) do not match the ' + 'function\'s arguments', name, e, name) + raise + return ret + + def record(self, host_calls): + """Records the host_call structure.""" + + for name, host_call in host_calls.items(): + host_fn, tensor_list_or_dict = host_call + self._names.append(name) + self._host_fns[name] = host_fn + + if isinstance(tensor_list_or_dict, dict): + for (key, tensor) in six.iteritems(tensor_list_or_dict): + self._tensor_keys[name].append(key) + self._tensors[name].append(tensor) + self._tensor_dtypes[name].append(tensor.dtype) + self._tensor_shapes[name].append(tensor.shape) + else: + # List or tuple. + self._tensor_keys[name] = None + for tensor in tensor_list_or_dict: + self._tensors[name].append(tensor) + self._tensor_dtypes[name].append(tensor.dtype) + self._tensor_shapes[name].append(tensor.shape) + + def create_enqueue_op(self, step=None): + """Create the op to enqueue the recorded host_calls. + + Returns: + A list of enqueue ops, which is empty if there are no host calls. + """ + if not self._names: + return [] + + tensors = [] + # TODO(jhseu): Consider deduping tensors. + for name in self._names: + tensors.extend(self._tensors[name]) + + if self._outfeed_every_n_steps > 1 and step is None: + raise ValueError('If outfeed is requested every n steps, you must pass ' + 'a tensor whose value is the step number within the ' + 'current training loop.') + with tf.compat.v1.device(tf.compat.v1.tpu.core(0)): + if self._outfeed_every_n_steps == 1: + return [tpu_ops.outfeed_enqueue_tuple(tensors)] + else: + return [ + tf.compat.v1.cond( + tf.math.equal( + tf.math.floormod(step, self._outfeed_every_n_steps), + 0), lambda: tpu_ops.outfeed_enqueue_tuple(tensors), + lambda: tf.no_op()) + ] + + def create_tpu_hostcall(self): + """Sends the tensors through outfeed and runs the host_fn on CPU. + + The tensors are concatenated along dimension 0 to form a global tensor + across all shards. The concatenated function is passed to the host_fn and + executed on the first host. + + Returns: + A dictionary mapping name to the return type of the host_call by that + name. + + Raises: + RuntimeError: If outfeed tensor is scalar. + """ + if not self._names: + return {} + + ret = {} + # For each i, dequeue_ops[i] is a list containing the tensors from all + # shards. This list is concatenated later. + dequeue_ops = [] + tensor_dtypes = [] + tensor_shapes = [] + for name in self._names: + for _ in self._tensors[name]: + dequeue_ops.append([]) + for dtype in self._tensor_dtypes[name]: + tensor_dtypes.append(dtype) + for shape in self._tensor_shapes[name]: + tensor_shapes.append(shape) + + # Outfeed ops execute on each replica's first logical core. Note: we must + # constraint it such that we have at most one outfeed dequeue and enqueue + # per replica. + for i in xrange(self._ctx.num_replicas): + host_device, ordinal_id = self._ctx.device_for_replica(i) + with tf.compat.v1.device(host_device): + outfeed_tensors = tpu_ops.outfeed_dequeue_tuple( + dtypes=tensor_dtypes, + shapes=tensor_shapes, + device_ordinal=ordinal_id) + for j, item in enumerate(outfeed_tensors): + dequeue_ops[j].append(item) + + # Deconstruct dequeue ops. 
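+    # Layout reminder: dequeue_ops[j] holds the j-th outfed tensor from every
+    # replica, e.g. with two replicas dequeue_ops == [[t0_r0, t0_r1],
+    # [t1_r0, t1_r1], ...]. It is flattened below for the force-dequeue group
+    # and re-sliced per host_call name.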
+ flat_dequeue_ops = [] + for l in dequeue_ops: + flat_dequeue_ops.extend(l) + + dequeue_ops_by_name = {} + pos = 0 + for name in self._names: + dequeue_ops_by_name[name] = dequeue_ops[pos:pos + + len(self._tensors[name])] + pos += len(self._tensors[name]) + + def _call_host_fn(fn, *args, **kw): + context = CatchInvalidHostcallFunctions() + context.Enter() + result = fn(*args, **kw) + context.Exit() + context.ExitResult(result) + return result + + # It is assumed evaluation always happens on single host TPU system. So, + # place all ops on tpu host if possible. + # + # TODO(jhseu): Evaluate whether this is right for summaries. + with tf.compat.v1.device( + self._ctx.tpu_host_placement_function(replica_id=0)): + for name in self._names: + dequeue_ops = dequeue_ops_by_name[name] + for i, item in enumerate(dequeue_ops): + # TODO(xiejw): Make the specification of the outfeed combinaton + # function more explicit and well-documented. We may want to give the + # user the option of concatenating along any axis. + if (self._ctx.config.tpu_config.per_host_input_for_training is + tpu_config.InputPipelineConfig.BROADCAST): + # If the infeed is in BROADCAST mode (each core recieving the same + # input), then we assume that the cores also produce identical + # copies of the same output, and we simply take the output from + # the first core. This mode is used by Mesh-TensorFlow. + with tf.control_dependencies(dequeue_ops[i]): + dequeue_ops[i] = tf.identity(dequeue_ops[i][0]) + else: + if dequeue_ops[i][0].shape.ndims == 0: + raise RuntimeError( + 'All tensors outfed from TPU should preserve batch size ' + 'dimension, but got scalar {}'.format(dequeue_ops[i][0])) + # Assume that the input has been batch-split and that axis 0 of the + # output tensors represents the batch size. Concatenate along + # the axis 0 to re-combine the batch. + dequeue_ops[i] = tf.concat(dequeue_ops[i], axis=0) + + if self._tensor_keys[name] is not None: + # The user-provided eval_metrics[1] is a dict. + dequeue_ops = dict(zip(self._tensor_keys[name], dequeue_ops)) + try: + ret[name] = _call_host_fn(self._host_fns[name], **dequeue_ops) + except TypeError as e: + tf.compat.v1.logging.warn( + 'Exception while calling %s: %s. It is likely the tensors ' + '(%s[1]) do not match the ' + 'function\'s arguments', name, e, name) + raise + else: + ret[name] = _call_host_fn(self._host_fns[name], *dequeue_ops) + + # force all dequeue operations to be run if not consumed by the host calls + ret['__force_dequeue'] = tf.group(*flat_dequeue_ops) + return ret + + +class _OutfeedHostCallHook(tf.compat.v1.train.SessionRunHook): + """Hook to run host calls when use_tpu=False.""" + + def __init__(self, tensors): + self._tensors = tensors + + def begin(self): + # We duplicate this code from the TPUInfeedOutfeedSessionHook rather than + # create a separate hook to guarantee execution order, because summaries + # need to be initialized before the outfeed thread starts. + # TODO(jhseu): Make a wrapper hook instead? + self._init_ops = summary_ops_v2.summary_writer_initializer_op() + # Get all the writer resources from the initializer, so we know what to + # flush. 
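+    # Each initializer op's first input is the summary writer resource; one
+    # flush op is created per writer and run later in end().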
+ self._finalize_ops = [] + for op in self._init_ops: + self._finalize_ops.append( + summary_ops_v2.legacy_raw_flush(writer=op.inputs[0])) + + def after_create_session(self, session, coord): + session.run(self._init_ops) + + def before_run(self, run_context): + return tf.compat.v1.train.SessionRunArgs(self._tensors) + + def end(self, session): + session.run(self._finalize_ops) + + +class _NotSaver(object): + """What to pass instead of a saver object if you don't want saving.""" + + def __init__(self, message): + self._message = message + + def save(self, *args, **kwargs): + del args, kwargs + tf.compat.v1.logging.info(self._message) + + +class ExamplesPerSecondHook(tf.compat.v1.train.StepCounterHook): + """Calculate and report global_step/sec and examples/sec during runtime.""" + + def __init__(self, + batch_size, + every_n_steps=100, + every_n_secs=None, + output_dir=None, + summary_writer=None): + self._batch_size = batch_size + super(ExamplesPerSecondHook, self).__init__( + every_n_steps=every_n_steps, + every_n_secs=every_n_secs, + output_dir=output_dir, + summary_writer=summary_writer) + + def _log_and_record(self, elapsed_steps, elapsed_time, global_step): + global_step_per_sec = elapsed_steps / elapsed_time + examples_per_sec = self._batch_size * global_step_per_sec + if self._summary_writer is not None: + global_step_summary = Summary(value=[ + Summary.Value( + tag='global_step/sec', simple_value=global_step_per_sec) + ]) + example_summary = Summary(value=[ + Summary.Value(tag='examples/sec', simple_value=examples_per_sec) + ]) + self._summary_writer.add_summary(global_step_summary, global_step) + self._summary_writer.add_summary(example_summary, global_step) + tf.compat.v1.logging.info('global_step/sec: %g', global_step_per_sec) + tf.compat.v1.logging.info('examples/sec: %g', examples_per_sec) + + +class InstallSignalHandlerHook(tf.compat.v1.train.SessionRunHook): + """Change SIGINT (CTRL^C) handler to force quit the process. + + The default behavior often results in hanging processes. + The original handler is restored after training/evaluation. + """ + + def __init__(self): + self._signal_fn = signal.getsignal(signal.SIGINT) + + def before_run(self, run_context): + signal.signal(signal.SIGINT, signal.SIG_DFL) + + def end(self, session): + signal.signal(signal.SIGINT, self._signal_fn) + + +class ExportSavedModelApiVersion(enum.Enum): + V1 = 1 + V2 = 2 + + +class BatchConfig( + collections.namedtuple('BatchConfig', [ + 'num_batch_threads', 'max_batch_size', 'batch_timeout_micros', + 'allowed_batch_sizes', 'max_enqueued_batches' + ])): + """Class to handle config inputs into the batching function.""" + + def __new__(cls, + num_batch_threads, + max_batch_size, + batch_timeout_micros, + allowed_batch_sizes, + max_enqueued_batches=100): + """Creates an BatchConfig instance. + + Args: + num_batch_threads: Number of scheduling threads for processing batches of + work. Determines the number of batches processed in parallel. + max_batch_size: Batch sizes will never be bigger than this. + batch_timeout_micros: Maximum number of microseconds to wait before + outputting an incomplete batch. + allowed_batch_sizes: Optional list of allowed batch sizes. If left empty, + does nothing. Otherwise, supplies a list of batch sizes, causing the op + to pad batches up to one of those sizes. The entries must increase + monotonically, and the final entry must equal max_batch_size. + max_enqueued_batches: The maximum depth of the batch queue. Defaults to + 100. + + Returns: + An BatchConfig instance. 
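+
+    Example (illustrative values only; any sizes satisfying the constraints
+    above work):
+
+      BatchConfig(
+          num_batch_threads=2,
+          max_batch_size=8,
+          batch_timeout_micros=5000,
+          allowed_batch_sizes=[2, 4, 8])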
+ """ + return super(BatchConfig, cls).__new__( + cls, + num_batch_threads=num_batch_threads, + max_batch_size=max_batch_size, + batch_timeout_micros=batch_timeout_micros, + allowed_batch_sizes=allowed_batch_sizes, + max_enqueued_batches=max_enqueued_batches) + + +@estimator_export(v1=['estimator.tpu.TPUEstimator']) +class TPUEstimator(estimator_lib.Estimator): + """Estimator with TPU support. + + TPUEstimator also supports training on CPU and GPU. You don't need to define + a separate `tf.estimator.Estimator`. + + TPUEstimator handles many of the details of running on TPU devices, such as + replicating inputs and models for each core, and returning to host + periodically to run hooks. + + TPUEstimator transforms a global batch size in params to a per-shard batch + size when calling the `input_fn` and `model_fn`. Users should specify + global batch size in constructor, and then get the batch size for each shard + in `input_fn` and `model_fn` by `params['batch_size']`. + + - For training, `model_fn` gets per-core batch size; `input_fn` may get + per-core or per-host batch size depending on `per_host_input_for_training` + in `TPUConfig` (See docstring for TPUConfig for details). + + - For evaluation and prediction, `model_fn` gets per-core batch size and + `input_fn` get per-host batch size. + + Evaluation + ========== + + `model_fn` should return `TPUEstimatorSpec`, which expects the `eval_metrics` + for TPU evaluation. If eval_on_tpu is False, the evaluation will execute on + CPU or GPU; in this case the following discussion on TPU evaluation does not + apply. + + `TPUEstimatorSpec.eval_metrics` is a tuple of `metric_fn` and `tensors`, where + `tensors` could be a list of any nested structure of `Tensor`s (See + `TPUEstimatorSpec` for details). `metric_fn` takes the `tensors` and returns + a dict from metric string name to the result of calling a metric function, + namely a `(metric_tensor, update_op)` tuple. + + One can set `use_tpu` to `False` for testing. All training, evaluation, and + predict will be executed on CPU. `input_fn` and `model_fn` will receive + `train_batch_size` or `eval_batch_size` unmodified as `params['batch_size']`. + + Current limitations: + -------------------- + + 1. TPU evaluation only works on a single host (one TPU worker) except + BROADCAST mode. + + 2. `input_fn` for evaluation should **NOT** raise an end-of-input exception + (`OutOfRangeError` or `StopIteration`). And all evaluation steps and all + batches should have the same size. + + Example (MNIST): + ---------------- + + ``` + # The metric Fn which runs on CPU. + def metric_fn(labels, logits): + predictions = tf.argmax(logits, 1) + return { + 'accuracy': tf.compat.v1.metrics.precision( + labels=labels, predictions=predictions), + } + + # Your model Fn which runs on TPU (eval_metrics is list in this example) + def model_fn(features, labels, mode, config, params): + ... + logits = ... + + if mode = tf.estimator.ModeKeys.EVAL: + return tpu_estimator.TPUEstimatorSpec( + mode=mode, + loss=loss, + eval_metrics=(metric_fn, [labels, logits])) + + # or specify the eval_metrics tensors as dict. + def model_fn(features, labels, mode, config, params): + ... + final_layer_output = ... + + if mode = tf.estimator.ModeKeys.EVAL: + return tpu_estimator.TPUEstimatorSpec( + mode=mode, + loss=loss, + eval_metrics=(metric_fn, { + 'labels': labels, + 'logits': final_layer_output, + })) + ``` + + Prediction + ========== + + Prediction on TPU is an experimental feature to support large batch inference. 
+ It is not designed for latency-critical system. In addition, due to some + usability issues, for prediction with small dataset, CPU `.predict`, i.e., + creating a new `TPUEstimator` instance with `use_tpu=False`, might be more + convenient. + + Note: In contrast to TPU training/evaluation, the `input_fn` for prediction + *should* raise an end-of-input exception (`OutOfRangeError` or + `StopIteration`), which serves as the stopping signal to `TPUEstimator`. To be + precise, the ops created by `input_fn` produce one batch of the data. + The `predict()` API processes one batch at a time. When reaching the end of + the data source, an end-of-input exception should be raised by one of these + operations. The user usually does not need to do this manually. As long as the + dataset is not repeated forever, the `tf.data` API will raise an end-of-input + exception automatically after the last batch has been produced. + + Note: Estimator.predict returns a Python generator. Please consume all the + data from the generator so that TPUEstimator can shutdown the TPU system + properly for user. + + Current limitations: + -------------------- + 1. TPU prediction only works on a single host (one TPU worker). + + 2. `input_fn` must return a `Dataset` instance rather than `features`. In + fact, .train() and .evaluate() also support Dataset as return value. + + Example (MNIST): + ---------------- + ``` + height = 32 + width = 32 + total_examples = 100 + + def predict_input_fn(params): + batch_size = params['batch_size'] + + images = tf.random.uniform( + [total_examples, height, width, 3], minval=-1, maxval=1) + + dataset = tf.data.Dataset.from_tensor_slices(images) + dataset = dataset.map(lambda images: {'image': images}) + + dataset = dataset.batch(batch_size) + return dataset + + def model_fn(features, labels, params, mode): + # Generate predictions, called 'output', from features['image'] + + if mode == tf.estimator.ModeKeys.PREDICT: + return tf.contrib.tpu.TPUEstimatorSpec( + mode=mode, + predictions={ + 'predictions': output, + 'is_padding': features['is_padding'] + }) + + tpu_est = TPUEstimator( + model_fn=model_fn, + ..., + predict_batch_size=16) + + # Fully consume the generator so that TPUEstimator can shutdown the TPU + # system. + for item in tpu_est.predict(input_fn=input_fn): + # Filter out item if the `is_padding` is 1. + # Process the 'predictions' + ``` + + Exporting + ========= + + `export_saved_model` exports 2 metagraphs, one with `saved_model.SERVING`, and + another with `saved_model.SERVING` and `saved_model.TPU` tags. At serving + time, these tags are used to select the appropriate metagraph to load. + + Before running the graph on TPU, the TPU system needs to be initialized. If + TensorFlow Serving model-server is used, this is done automatically. If not, + please use `session.run(tpu.initialize_system())`. + + There are two versions of the API: 1 or 2. + + In V1, the exported CPU graph is `model_fn` as it is. The exported TPU graph + wraps `tpu.rewrite()` and `TPUPartitionedCallOp` around `model_fn` so + `model_fn` is on TPU by default. To place ops on CPU, + `tpu.outside_compilation(host_call, logits)` can be used. + + Example: + ---------------- + + ``` + def model_fn(features, labels, mode, config, params): + ... + logits = ... 
+ export_outputs = { + 'logits': export_output_lib.PredictOutput( + {'logits': logits}) + } + + def host_call(logits): + class_ids = math_ops.argmax(logits) + classes = string_ops.as_string(class_ids) + export_outputs['classes'] = + export_output_lib.ClassificationOutput(classes=classes) + + tpu.outside_compilation(host_call, logits) + + ... + ``` + + In V2, `export_saved_model()` sets up `params['use_tpu']` flag to let the user + know if the code is exporting to TPU (or not). When `params['use_tpu']` is + `True`, users need to call `tpu.rewrite()`, `TPUPartitionedCallOp` and/or + `batch_function()`. + + TIP: V2 is recommended as it is more flexible (eg: batching, etc). + + @compatibility(TF2) + TPU Estimator manages its own TensorFlow graph and session, so it is not + compatible with TF2 behaviors. We recommend that you migrate to the newer + `tf.distribute.TPUStrategy`. See the + [TPU guide](https://www.tensorflow.org/guide/tpu) for details. + @end_compatibility + """ + + def __init__(self, + model_fn=None, + model_dir=None, + config=None, + params=None, + use_tpu=True, + train_batch_size=None, + eval_batch_size=None, + predict_batch_size=None, + batch_axis=None, + eval_on_tpu=True, + export_to_tpu=True, + export_to_cpu=True, + warm_start_from=None, + embedding_config_spec=None, + export_saved_model_api_version=ExportSavedModelApiVersion.V1): + """Constructs an `TPUEstimator` instance. + + Args: + model_fn: Model function as required by `Estimator` which returns + EstimatorSpec or TPUEstimatorSpec. `training_hooks`, 'evaluation_hooks', + and `prediction_hooks` must not capure any TPU Tensor inside the + model_fn. + model_dir: Directory to save model parameters, graph and etc. This can + also be used to load checkpoints from the directory into a estimator to + continue training a previously saved model. If `None`, the model_dir in + `config` will be used if set. If both are set, they must be same. If + both are `None`, a temporary directory will be used. + config: An `tpu_config.RunConfig` configuration object. Cannot be `None`. + params: An optional `dict` of hyper parameters that will be passed into + `input_fn` and `model_fn`. Keys are names of parameters, values are + basic python types. There are reserved keys for `TPUEstimator`, + including 'batch_size'. + use_tpu: A bool indicating whether TPU support is enabled. Currently, - + TPU training and evaluation respect this bit, but eval_on_tpu can + override execution of eval. See below. + train_batch_size: An int representing the global training batch size. + TPUEstimator transforms this global batch size to a per-shard batch + size, as params['batch_size'], when calling `input_fn` and `model_fn`. + Cannot be `None` if `use_tpu` is `True`. Must be divisible by total + number of replicas. + eval_batch_size: An int representing evaluation batch size. Must be + divisible by total number of replicas. + predict_batch_size: An int representing the prediction batch size. Must be + divisible by total number of replicas. + batch_axis: A python tuple of int values describing how each tensor + produced by the Estimator `input_fn` should be split across the TPU + compute shards. For example, if your input_fn produced (images, labels) + where the images tensor is in `HWCN` format, your shard dimensions would + be [3, 0], where 3 corresponds to the `N` dimension of your images + Tensor, and 0 corresponds to the dimension along which to split the + labels to match up with the corresponding images. 
If None is supplied, + and per_host_input_for_training is True, batches will be sharded based + on the major dimension. If tpu_config.per_host_input_for_training is + False or `PER_HOST_V2`, batch_axis is ignored. + eval_on_tpu: If False, evaluation runs on CPU or GPU. In this case, the + model_fn must return `EstimatorSpec` when called with `mode` as `EVAL`. + export_to_tpu: If True, `export_saved_model()` exports a metagraph for + serving on TPU. Note that unsupported export modes such as EVAL will be + ignored. For those modes, only a CPU model will be exported. Currently, + export_to_tpu only supports PREDICT. + export_to_cpu: If True, `export_saved_model()` exports a metagraph for + serving on CPU. + warm_start_from: Optional string filepath to a checkpoint or SavedModel to + warm-start from, or a `tf.estimator.WarmStartSettings` object to fully + configure warm-starting. If the string filepath is provided instead of + a `WarmStartSettings`, then all variables are warm-started, and it is + assumed that vocabularies and Tensor names are unchanged. + embedding_config_spec: Optional EmbeddingConfigSpec instance to support + using TPU embedding. + export_saved_model_api_version: an integer: 1 or 2. 1 corresponds to V1, + 2 corresponds to V2. (Defaults to V1). With + V1, `export_saved_model()` adds rewrite() and TPUPartitionedCallOp() for + user; while in v2, user is expected to add rewrite(), + TPUPartitionedCallOp() etc in their model_fn. + + Raises: + ValueError: `params` has reserved keys already. + """ + if config is None or not isinstance(config, tpu_config.RunConfig): + raise ValueError( + '`config` must be provided with type `tpu_config.RunConfig`') + + if params is not None and any(k in params for k in _RESERVED_PARAMS_KEYS): + raise ValueError('{} are reserved keys but existed in params {}.'.format( + _RESERVED_PARAMS_KEYS, params)) + + if use_tpu: + # Perform some very basic validations. More validations will be found in + # _InternalTPUContext. + if train_batch_size is None: + raise ValueError('`train_batch_size` cannot be `None`') + util_lib.check_positive_integer(train_batch_size, 'train_batch_size') + + if (config.tpu_config.per_host_input_for_training is + tpu_config.InputPipelineConfig.PER_SHARD_V1 and + config.tpu_config.num_cores_per_replica): + raise ValueError( + 'Model parallelism only supports per host input for training. ' + 'Please adjust TPURunconfig.per_host_input_for_training.') + + if eval_batch_size is not None: + util_lib.check_positive_integer(eval_batch_size, 'eval_batch_size') + + if predict_batch_size is not None: + util_lib.check_positive_integer(predict_batch_size, + 'predict_batch_size') + + if embedding_config_spec: + if (config.tpu_config.per_host_input_for_training not in ( + tpu_config.InputPipelineConfig.PER_HOST_V1, + tpu_config.InputPipelineConfig.PER_HOST_V2)): + raise ValueError('Only PER_HOST_V1 and PER_HOST_V2 is supported when ' + 'using TPU Embedding; got {}.'.format( + config.tpu_config.per_host_input_for_training)) + self._embedding_from_feature_columns = ( + embedding_config_spec.feature_columns is not None) + + if (not (use_tpu and eval_on_tpu) and embedding_config_spec and + embedding_config_spec.partition_strategy == 'mod'): + raise ValueError('Mod sharding of embedding tables not supported on ' + 'CPU.') + _tpu_estimator_gauge.get_cell().set(True) + # Verifies the model_fn signature according to Estimator framework. 
+ estimator_lib._verify_model_fn_args(model_fn, params) # pylint: disable=protected-access + # We cannot store config and params in this constructor as parent + # constructor might change them, such as assigning a temp dir for + # config.model_dir. + model_function = self._augment_model_fn(model_fn, batch_axis) + + # Overwrite log_step_count_steps to disable TensorLoggingHook and + # StepCounterHook from being created in Estimator. TPUEstimator already + # added equivalent hooks in _augment_model_fn above. + self._log_every_n_steps = config.log_step_count_steps + config = config.replace(log_step_count_steps=None) + + # Passing non-None params as wrapped model_fn has it. + params = params or {} + super(TPUEstimator, self).__init__( + model_fn=model_function, + model_dir=model_dir, + config=config, + params=params, + warm_start_from=warm_start_from) + self._iterations_per_training_loop = util_lib.parse_iterations_per_loop( + self._config.tpu_config.iterations_per_loop) + # In absence of an explicit `log_every_n_secs` config, if the + # `iterations_per_loop` value is specified as time in seconds, enable + # logging every n secs based on the `iterations_per_loop` value. A trade-off + # avoiding API change on the current release. + # TODO(henrytan): add `log_every_n_secs` to RunConfig. + if self._iterations_per_training_loop.unit == 'seconds': + self._log_every_n_secs = self._iterations_per_training_loop.value + self._log_every_n_steps = None + elif self._iterations_per_training_loop.unit == 'count': + if self._log_every_n_steps is not None: + # Each session.run() lasts for iterations_per_loop. We can't log + # in-between a session.run(), and we can only log after the + # `iterations_per_loop` steps, so we can only approximate. If a user + # requests to log every N steps, we actually want to roughly log every + # N / `iterations_per_loop` steps to match the original intention. + self._log_every_n_steps = ( + int( + math.ceil( + float(self._log_every_n_steps) / + self._iterations_per_training_loop.value))) + self._log_every_n_secs = None + else: + assert False, ('Invalid TPUConfig `iterations_per_loop` value. ' + 'Indicates a bug in `iterations_per_loop` ' + 'parsing.') + + # All properties passed to _InternalTPUContext are immutable. 
+ # pylint: disable=protected-access + self._ctx = tpu_context._get_tpu_context(self._config, train_batch_size, + eval_batch_size, + predict_batch_size, use_tpu, + eval_on_tpu, embedding_config_spec) + + self._export_to_cpu = export_to_cpu + self._export_to_tpu = export_to_tpu + + if not (isinstance(export_saved_model_api_version, + ExportSavedModelApiVersion) + or export_saved_model_api_version == 1 + or export_saved_model_api_version == 2): + raise ValueError('export_saved_model_api_version should be 1 or 2; ' + 'got {}.'.format( + export_saved_model_api_version)) + self._export_saved_model_api_version = export_saved_model_api_version + self._is_input_fn_invoked = None + + self._rendezvous = {} + + def _add_meta_graph_for_mode(self, + builder, + input_receiver_fn_map, + checkpoint_path, + save_variables=True, + mode=model_fn_lib.ModeKeys.PREDICT, + export_tags=None, + check_variables=True, + strip_default_attrs=True): + if self._export_to_tpu and mode != model_fn_lib.ModeKeys.PREDICT: + tf.compat.v1.logging.warn( + 'TPUEstimator only handles mode PREDICT for exporting ' + 'when `export_to_tpu` is `True`; Mode {} will be ignored ' + 'for TPU.'.format(mode)) + + if not self._export_to_cpu and not self._export_to_tpu: + raise ValueError('One of export_to_cpu and export_to_tpu must be true.') + + if self._export_to_cpu: + (super(TPUEstimator, self)._add_meta_graph_for_mode( + builder, + input_receiver_fn_map, + checkpoint_path, + save_variables, + mode=mode, + export_tags=export_tags, + check_variables=check_variables, + strip_default_attrs=strip_default_attrs)) + + if self._export_to_tpu and mode == model_fn_lib.ModeKeys.PREDICT: + input_receiver_fn_map = { + _INFERENCE_ON_TPU_MODE: input_receiver_fn_map[mode] + } + export_tags = [tf.saved_model.SERVING, tf.saved_model.TPU] + mode = _INFERENCE_ON_TPU_MODE + + # See b/110052256 for why `check_variables` is `False`. + if not self._export_to_cpu: + check_variables = save_variables = True + else: + check_variables = save_variables = False + (super(TPUEstimator, self)._add_meta_graph_for_mode( + builder, + input_receiver_fn_map, + checkpoint_path, + save_variables=save_variables, + mode=mode, + export_tags=export_tags, + check_variables=check_variables, + strip_default_attrs=strip_default_attrs)) + + def _call_model_fn(self, features, labels, mode, config): + if mode == _INFERENCE_ON_TPU_MODE: + context = tpu._TPUInferenceContext('tpu_inference', check_ops=False) + try: + context.Enter() + if ( + (self._export_saved_model_api_version == + ExportSavedModelApiVersion.V1) + or self._export_saved_model_api_version == 1): + result = self._call_model_fn_for_inference(features, labels, mode, + config) + else: + result = super(TPUEstimator, + self)._call_model_fn(features, labels, mode, config) + finally: + context.Exit() + return result + else: + return super(TPUEstimator, self)._call_model_fn(features, labels, mode, + config) + + def _call_model_fn_for_inference(self, features, labels, mode, config): + """Wraps `_call_model_fn` for `export_saved_model`.""" + if mode != _INFERENCE_ON_TPU_MODE: + raise ValueError('mode must be {}; ' + 'got {}.'.format(_INFERENCE_ON_TPU_MODE, mode)) + return model_fn_inference_on_tpu( + self._model_fn, + features, + labels, + config, + self._params, + batch_config=None) + + def _create_global_step(self, graph): + """Creates a global step suitable for TPUs. + + Args: + graph: The graph in which to create the global step. + + Returns: + A global step `Tensor`. 
+ + Raises: + ValueError: if the global step tensor is already defined. + """ + return _create_global_step(graph) + + def _convert_train_steps_to_hooks(self, steps, max_steps): + with self._ctx.with_mode(model_fn_lib.ModeKeys.TRAIN) as ctx: + if ctx.is_running_on_cpu(): + return super(TPUEstimator, + self)._convert_train_steps_to_hooks(steps, max_steps) + + # On TPU. + if steps is None and max_steps is None: + raise ValueError( + 'For TPU training, one of `steps` or `max_steps` must be set. ' + 'Cannot be both `None`.') + + # Estimator.train has explicit positiveness check. + if steps is not None: + util_lib.check_positive_integer(steps, 'Train steps') + if max_steps is not None: + util_lib.check_positive_integer(max_steps, 'Train max_steps') + + return [ + _TPUStopAtStepHook(self._iterations_per_training_loop, steps, max_steps) + ] + + def _convert_eval_steps_to_hooks(self, steps): + with self._ctx.with_mode(model_fn_lib.ModeKeys.EVAL) as ctx: + if ctx.is_running_on_cpu(): + return super(TPUEstimator, self)._convert_eval_steps_to_hooks(steps) + + if steps is None: + raise ValueError('Evaluate `steps` must be set on TPU. Cannot be `None`.') + + util_lib.check_positive_integer(steps, 'Eval steps') + + return [ + evaluation._StopAfterNEvalsHook( # pylint: disable=protected-access + num_evals=steps), + _SetEvalIterationsHook(steps) + ] + + def _call_input_fn(self, input_fn, mode, input_context=None): + """Calls the input function. + + Args: + input_fn: The input function. + mode: ModeKeys + input_context: Optional instance of `tf.distribute.InputContext`. + + Returns: + In TPU mode, returns an input_fn to be called later in model_fn. + Otherwise, calls the input_fn and returns either fatures or + (features, labels). + + Raises: + ValueError: if input_fn takes invalid arguments or does not have `params`. + """ + input_fn_args = function_utils.fn_args(input_fn) + config = self.config # a deep copy. + kwargs = {} + if 'params' in input_fn_args: + kwargs['params'] = self.params # a deep copy. + else: + raise ValueError('input_fn ({}) does not include params argument, ' + 'required by TPUEstimator to pass batch size as ' + 'params["batch_size"]'.format(input_fn)) + if 'config' in input_fn_args: + kwargs['config'] = config + + if 'mode' in input_fn_args: + kwargs['mode'] = mode + + if 'input_context' in input_fn_args: + kwargs['input_context'] = input_context + + # Records the fact input_fn has been invoked. + self._is_input_fn_invoked = True + + with self._ctx.with_mode(mode) as ctx: + if (ctx.is_running_on_cpu() and + ctx.is_input_slice_broadcast_to_all_cores()): + raise ValueError('Invalid TPUConfig `eval_training_input_configuration`' + ' value. SLICED mode only works on use_tpu = True.') + # Setting the batch size in params first. This helps user to have same + # input_fn for use_tpu=True/False. + batch_size_for_input_fn = ctx.batch_size_for_input_fn + if batch_size_for_input_fn is not None: + _add_item_to_params(kwargs['params'], _BATCH_SIZE_KEY, + batch_size_for_input_fn) + + # For export_saved_model, input_fn is never passed to Estimator. So, + # `is_export_mode` must be False. + if ctx.is_running_on_cpu(is_export_mode=False): + with tf.compat.v1.device('/device:CPU:0'): + return input_fn(**kwargs) + + # For TPU computation, input_fn should be invoked in a tf.while_loop for + # performance. While constructing the tf.while_loop, the structure of + # inputs returned by the `input_fn` needs to be recorded. 
The structure + # includes whether features or labels is dict or single Tensor, dict keys, + # tensor shapes, and dtypes. The recorded structure is used to create the + # infeed dequeue ops, which must be wrapped and passed as a Fn, called + # inside the TPU computation, as the TPU computation is wrapped inside a + # tf.while_loop also. So, we either pass input_fn to model_fn or pass + # dequeue_fn to model_fn. Here, `input_fn` is passed directly as + # `features` in `model_fn` signature. + def _input_fn(ctx): + _add_item_to_params(kwargs['params'], _CTX_KEY, ctx) + return input_fn(**kwargs) + + return _input_fn + + def _validate_features_in_predict_input(self, result): + """Skip the validation. + + For TPUEstimator, we do not need to check the result type. `_InputPipeline` + has stronger check. Parent class's check generates confusing warning msg. + + Args: + result: `features` returned by input_fn. + """ + pass + + def train(self, + input_fn, + epochs, + hooks=None, + steps=None, + max_steps=None, + saving_listeners=None, + tracker=None): + rendezvous = error_handling.ErrorRendezvous(num_sources=3) + self._rendezvous[model_fn_lib.ModeKeys.TRAIN] = rendezvous + try: + return super(TPUEstimator, self).train( + input_fn=input_fn, + epochs=epochs, + hooks=hooks, + steps=steps, + max_steps=max_steps, + saving_listeners=saving_listeners, + tracker=tracker) + except Exception: # pylint: disable=broad-except + rendezvous.record_error('training_loop', sys.exc_info()) + finally: + rendezvous.record_done('training_loop') + rendezvous.raise_errors() + + def evaluate(self, + input_fn, + steps=None, + hooks=None, + checkpoint_path=None, + name=None): + rendezvous = error_handling.ErrorRendezvous(num_sources=3) + self._rendezvous[model_fn_lib.ModeKeys.EVAL] = rendezvous + try: + return super(TPUEstimator, self).evaluate( + input_fn, + steps=steps, + hooks=hooks, + checkpoint_path=checkpoint_path, + name=name) + except Exception: # pylint: disable=broad-except + rendezvous.record_error('evaluation_loop', sys.exc_info()) + finally: + rendezvous.record_done('evaluation_loop') + rendezvous.raise_errors() + + def predict(self, + input_fn, + predict_keys=None, + hooks=None, + checkpoint_path=None, + yield_single_examples=True): + rendezvous = error_handling.ErrorRendezvous(num_sources=3) + self._rendezvous[model_fn_lib.ModeKeys.PREDICT] = rendezvous + try: + for result in super(TPUEstimator, self).predict( + input_fn=input_fn, + predict_keys=predict_keys, + hooks=hooks, + checkpoint_path=checkpoint_path, + yield_single_examples=yield_single_examples): + yield result + except Exception: # pylint: disable=broad-except + rendezvous.record_error('prediction_loop', sys.exc_info()) + finally: + rendezvous.record_done('prediction_loop') + rendezvous.raise_errors() + + rendezvous.record_done('prediction_loop') + rendezvous.raise_errors() + + def _augment_model_fn(self, model_fn, batch_axis): + """Returns a new model_fn, which wraps the TPU support.""" + + def _model_fn(features, labels, mode, config, params): + """A Estimator `model_fn` for TPUEstimator.""" + + # `input_fn` is called in `train()`, `evaluate()`, and `predict()`, + # but not in `export_saved_model()`. + if self._is_input_fn_invoked: + is_export_mode = False + else: + is_export_mode = True + + # Clear the bit. 
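+      # Clearing it here means the next `model_fn` call re-derives export mode
+      # from whether `_call_input_fn` ran for that call.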
+ self._is_input_fn_invoked = None + + if is_export_mode: + if mode == _INFERENCE_ON_TPU_MODE: + _add_item_to_params(params, _USE_TPU_KEY, True) + mode = model_fn_lib.ModeKeys.PREDICT + else: + _add_item_to_params(params, _USE_TPU_KEY, False) + + with self._ctx.with_mode(mode) as ctx: + model_fn_wrapper = _ModelFnWrapper(model_fn, config, params, ctx) + + # examples_hook is added to training_hooks for both CPU and TPU + # execution. + if (self._log_every_n_steps is not None or + self._log_every_n_secs is not None): + examples_hook = ExamplesPerSecondHook( + ctx.global_batch_size, + # pylint:disable=g-long-ternary + output_dir=(self.model_dir + if not config or config.save_summary_steps else None), + # pylint:enable=g-long-ternary + every_n_steps=self._log_every_n_steps, + every_n_secs=self._log_every_n_secs) + + if ctx.is_running_on_cpu(is_export_mode=is_export_mode): + tf.compat.v1.logging.info('Running %s on CPU/GPU', mode) + estimator_spec = model_fn_wrapper.call_without_tpu( + features, labels, is_export_mode=is_export_mode) + if (self._log_every_n_steps is not None or + self._log_every_n_secs is not None): + estimator_spec = estimator_spec._replace( + training_hooks=estimator_spec.training_hooks + (examples_hook,)) + return estimator_spec + + assert labels is None, '`labels` passed to `model_fn` must be `None`.' + # TPUEstimator._call_input_fn passes `input_fn` as features to here. + assert callable(features), '`input_fn` is not callable.' + input_fn = features + + tpu_init_ops = [] + if ctx.embedding_config and mode == model_fn_lib.ModeKeys.TRAIN: + dummy_table_variables, dummy_table_variables_init = ( + tpu_embedding_gradient.create_dummy_table_variables( + ctx.embedding_config.tpu_embedding)) + ctx.embedding_config.dummy_table_variables = dummy_table_variables + tpu_init_ops.append(dummy_table_variables_init) + + input_holders = _InputPipeline(input_fn, batch_axis, ctx) + enqueue_ops, dequeue_fn, input_hooks, run_infeed_loop_on_coordinator = ( + input_holders.generate_infeed_enqueue_ops_and_dequeue_fn()) + + graph = tf.compat.v1.get_default_graph() + for enqueue_op in enqueue_ops: + if isinstance(enqueue_op, list): + graph.get_collection_ref(_TPU_ENQUEUE_OPS).extend(enqueue_op) + else: + graph.add_to_collection(_TPU_ENQUEUE_OPS, enqueue_op) + + if mode == model_fn_lib.ModeKeys.TRAIN: + compile_op, loss, host_call, scaffold_fn, training_hooks = ( + _train_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn)) + has_saver_hook = training_hooks and any( + isinstance(hook, tf.compat.v1.train.CheckpointSaverHook) + for hook in training_hooks) + if ctx.embedding_config: + g = tf.compat.v1.get_default_graph() + table_to_config_dict = ( + ctx.embedding_config.tpu_embedding.table_to_config_dict) + optimization_parameters = ( + ctx.embedding_config.tpu_embedding.optimization_parameters) + if self._embedding_from_feature_columns: + embedding_variable_name_by_table, slot_variable_names_by_table = ( + _tpu_estimator_embedding.get_full_variable_names( + g, table_to_config_dict, optimization_parameters)) + else: + embedding_variable_name_by_table = None + slot_variable_names_by_table = None + embedding_variables_and_ops = ( + ctx.embedding_config.tpu_embedding.create_variables_and_ops( + embedding_variable_name_by_table, + slot_variable_names_by_table)) + tpu_init_ops.extend(embedding_variables_and_ops.load_ops()) + # scaffold_fn must be called after variables for TPU embedding has + # been created on CPU, as user might reinitialize those from some + # checkpoint within scaffold_fn. 
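+          # `_get_scaffold` runs the scaffold_fn inside a `_CapturingContext`
+          # (and wraps `Scaffold.finalize` in one), which raises if any
+          # created op depends on the TPU computation; see `_get_scaffold`
+          # below.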
+ scaffold = _get_scaffold(scaffold_fn) + + host_ops = host_call.create_tpu_hostcall() + + shutdown_hooks = [] + shutdown_mode = os.environ.get('TF_TPU_GRACEFUL_SHUTDOWN_MODE', + 'reset_computation') + if shutdown_mode: + if shutdown_mode == 'shutdown_worker': + finalizer_hooks = [ + session_support.ShutdownLameWorkers(), + ] + elif shutdown_mode == 'shutdown_all_workers': + finalizer_hooks = [ + session_support.ShutdownAllWorkers(), + ] + elif shutdown_mode == 'reset_computation': + finalizer_hooks = [ + session_support.ResetComputation(), + ] + elif not shutdown_mode: + finalizer_hooks = [] + else: + raise ValueError('Unknown TF_TPU_GRACEFUL_SHUTDOWN_MODE "%s"' % + shutdown_mode) + + if finalizer_hooks: + if has_saver_hook: + saver = _NotSaver( + 'No save on shutdown when there are user-defined ' + 'CheckpointSaverHooks') + else: + saver = None # Yes automatic save on shutdown. + shutdown_hooks.append( + session_support.GracefulShutdownHook( + checkpoint_prefix=self.model_dir + '/model.ckpt', + on_shutdown_hooks=finalizer_hooks, + saver=saver)) + + with tf.control_dependencies([loss]): + global_step = tf.identity(tf.compat.v1.train.get_global_step()) + hooks = input_hooks + shutdown_hooks + + if ctx.feed_hook is not None: + tf.compat.v1.logging.info( + 'Use user implemented tpu infeed outfeed session hook class.') + infeed_outfeed_session_hook_class = ctx.feed_hook + else: + infeed_outfeed_session_hook_class = TPUInfeedOutfeedSessionHook + + hooks.extend([ + infeed_outfeed_session_hook_class( + ctx, + enqueue_ops, + host_ops, + tpu_compile_op=compile_op, + run_infeed_loop_on_coordinator=( + run_infeed_loop_on_coordinator), + rendezvous=self._rendezvous[mode], + master=self._config.master, + session_config=self._session_config, + tpu_init_ops=tpu_init_ops, + outfeed_every_n_steps=self._config.tpu_config + .experimental_host_call_every_n_steps), + InstallSignalHandlerHook() + ]) + if _check_add_preemption_hook(self._config.cluster): + hooks.extend( + [preempted_hook.CloudTPUPreemptedHook(self._config.cluster)]) + if (self._log_every_n_steps is not None or + self._log_every_n_secs is not None): + if self._iterations_per_training_loop.unit == 'count': + examples_hook._set_steps_per_run( # pylint: disable=protected-access + self._iterations_per_training_loop.value) + hooks.append( + tf.compat.v1.train.LoggingTensorHook( + { + 'loss': tf.identity(loss), + 'step': global_step, + }, + every_n_iter=self._log_every_n_steps, + every_n_secs=self._log_every_n_secs)) + hooks.append(examples_hook) + + if training_hooks: + hooks.extend(training_hooks) + + chief_hooks = [] + if (not has_saver_hook and + (self._config.save_checkpoints_secs or + self._config.save_checkpoints_steps)): + checkpoint_hook = tf.compat.v1.train.CheckpointSaverHook( + self.model_dir, + save_secs=self._config.save_checkpoints_secs, + save_steps=self._config.save_checkpoints_steps, + scaffold=scaffold, + save_graph_def=self._config.checkpoint_save_graph_def) + if self._iterations_per_training_loop.unit == 'count': + checkpoint_hook._set_steps_per_run( # pylint: disable=protected-access + self._iterations_per_training_loop.value) + chief_hooks.append(checkpoint_hook) + else: + tf.compat.v1.logging.info('Bypassing TPUEstimator hook') + + tf.compat.v1.summary.scalar(model_fn_lib.LOSS_METRIC_KEY, loss) + with tf.control_dependencies([loss]): + update_ops = _sync_variables_ops(ctx) + if ctx.embedding_config: + update_ops.extend(embedding_variables_and_ops.retrieve_ops()) + + # Validate the TPU training graph to catch basic errors + 
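+          # (At present this mainly verifies that a CrossReplicaSum op, as
+          # added by `CrossShardOptimizer`, is present when
+          # ctx.num_replicas > 1; see `_validate_tpu_training_graph` below.)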
_validate_tpu_training_graph(ctx) + + train_op = tf.group(*update_ops) + graph.add_to_collection(_TPU_TRAIN_OP, train_op) + + return model_fn_lib.EstimatorSpec( + mode, + loss=loss, + training_chief_hooks=chief_hooks, + training_hooks=hooks, + train_op=train_op, + scaffold=scaffold) + + if mode == model_fn_lib.ModeKeys.EVAL: + compile_op, total_loss, host_calls, scaffold_fn, eval_hooks = ( + _eval_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn)) + if ctx.embedding_config: + g = tf.compat.v1.get_default_graph() + table_to_config_dict = ( + ctx.embedding_config.tpu_embedding.table_to_config_dict) + if self._embedding_from_feature_columns: + embedding_variable_name_by_table, _ = ( + _tpu_estimator_embedding.get_full_variable_names( + g, table_to_config_dict)) + else: + embedding_variable_name_by_table = None + embedding_variables_and_ops = ( + ctx.embedding_config.tpu_embedding.create_variables_and_ops( + embedding_variable_name_by_table)) + tpu_init_ops.extend(embedding_variables_and_ops.load_ops()) + # scaffold_fn must be called after variables for TPU embedding has + # been created on CPU, as user might reinitialize those from some + # checkpoint within scaffold_fn. + scaffold = _get_scaffold(scaffold_fn) + iterations_per_loop_var = _create_or_get_iterations_per_loop() + mean_loss = tf.compat.v1.div( + total_loss, + tf.cast(iterations_per_loop_var, dtype=total_loss.dtype)) + + with tf.control_dependencies([mean_loss]): + # After TPU evaluation computation is done (the mean_loss tensor), + # reads all variables back from TPU and updates the eval step + # counter properly + internal_ops_to_run = _sync_variables_ops(ctx) + internal_ops_to_run.append( + _increase_eval_step_op(iterations_per_loop_var)) + + host_call_ret = host_calls.create_tpu_hostcall() + eval_metric_ops = {} + eval_update_ops = [] + + eval_metrics = host_call_ret.get('eval_metrics', {}) + if eval_metrics: + # Creates a dummy metric update_op for all metrics. Estimator + # expects all metrics in `eval_metric_ops` have update_op and calls + # them one by one. The real metric update_ops are invoked in a + # separated thread. So, here give Estimator the dummy op for all + # metrics. + with tf.control_dependencies(internal_ops_to_run): + dummy_update_op = tf.no_op() + + for k, v in eval_metrics.items(): + eval_metric_ops[k] = (v[0], dummy_update_op) + eval_update_ops.append(v[1]) + else: + # If no eval metrics are passed, create an identity node for the + # loss and add `internal_ops_to_run` to its dependencies. So + # `internal_ops_to_run` can be executed. 
+ with tf.control_dependencies(internal_ops_to_run): + mean_loss = tf.identity(mean_loss) + + if 'host_call' not in host_call_ret: + host_ops = [] + else: + host_ops = host_call_ret['host_call'] + hooks = [ + TPUInfeedOutfeedSessionHook( + ctx, + enqueue_ops, + eval_update_ops + host_ops, + tpu_compile_op=compile_op, + run_infeed_loop_on_coordinator=( + run_infeed_loop_on_coordinator), + rendezvous=self._rendezvous[mode], + master=self._config.evaluation_master, + session_config=self._session_config, + tpu_init_ops=tpu_init_ops) + ] + input_hooks + + if _check_add_preemption_hook(self._config.cluster): + hooks.extend( + [preempted_hook.CloudTPUPreemptedHook(self._config.cluster)]) + + if eval_hooks: + hooks.extend(eval_hooks) + + return model_fn_lib.EstimatorSpec( + mode, + loss=mean_loss, + evaluation_hooks=hooks, + eval_metric_ops=eval_metric_ops, + scaffold=scaffold) + + # Predict + assert mode == model_fn_lib.ModeKeys.PREDICT + + (compile_op, dummy_predict_op, host_calls, scaffold_fn, + prediction_hooks) = _predict_on_tpu_system(ctx, model_fn_wrapper, + dequeue_fn) + scaffold = _get_scaffold(scaffold_fn) + with tf.control_dependencies([dummy_predict_op]): + internal_ops_to_run = _sync_variables_ops(ctx) + with tf.control_dependencies(internal_ops_to_run): + dummy_predict_op = tf.no_op() + + # In train and evaluation, the main TPU program is passed to monitored + # training session to run. Infeed enqueue and outfeed dequeue are + # executed in side threads. This is not the configuration for + # prediction mode. + # + # For prediction, the Estimator executes the EstimatorSpec.predictions + # directly and yield the element (via generator) to call site. So, the + # outfeed based prediction must be passed to MonitoredSession directly. + # Other parts of the TPU execution are organized as follows. + # + # 1. All outfeed based Tensors must be grouped with predictions Tensors + # to form a single invocation. This avoid the issue we might trigger + # multiple outfeeds incorrectly. To achieve this, `host_call` is + # placed in control_dependencies of `stopping_signals`, and + # `stopping_signals` is passed into _StoppingPredictHook, which sets + # the `stopping_signals` as SessionRunArgs. MonitoredSession merges + # all SessionRunArgs with the fetch in session.run together. + # + # 2. The TPU program (dummy_predict_op) and enqueue_ops (infeed Enqueue) + # are grouped together. They will be launched once and only once in + # side threads and they quit naturally according to the SAME stopping + # condition. + enqueue_ops.append(dummy_predict_op) + + host_call_ret = host_calls.create_tpu_hostcall() + if 'host_call' not in host_call_ret: + host_ops = [] + else: + host_ops = host_call_ret['host_call'] + + predictions = host_call_ret['predictions'] + _verify_cross_hosts_transfer_size( + predictions, + message=( + 'The estimated size for TPUEstimatorSpec.predictions is too ' + 'large.')) + signals = host_call_ret['signals'] + + with tf.control_dependencies(host_ops): + host_ops = [] # Empty, we do do not need it anymore. 
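+          # The host call ops are not lost: they are control dependencies of
+          # the ops created inside this `with` block (the stopping signal and
+          # the sliced predictions), so clearing the list only avoids running
+          # them a second time via the session hook below.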
+ scalar_stopping_signal = _StopSignals.as_scalar_stopping_signal( + signals) + predictions = _PaddingSignals.slice_tensor_or_dict( + predictions, signals) + + hooks = [ + _StoppingPredictHook(scalar_stopping_signal), + TPUInfeedOutfeedSessionHookForPrediction( + ctx, + enqueue_ops, + host_ops, + rendezvous=self._rendezvous[mode], + tpu_compile_op=compile_op, + master=self._config.master, + session_config=self._session_config), + ] + input_hooks + + if prediction_hooks: + hooks.extend(prediction_hooks) + + return model_fn_lib.EstimatorSpec( + mode, + prediction_hooks=hooks, + predictions=predictions, + scaffold=scaffold) + + return _model_fn + + +def _check_add_preemption_hook(cluster): + return (tpu_cluster_resolver.is_running_in_gce() and cluster and isinstance( + cluster, tf.distribute.cluster_resolver.TPUClusterResolver) and + cluster._cloud_tpu_client.api_available()) + + +def _export_output_to_tensors(export_output): + """Get a list of `Tensors` used in `export_output`. + + Args: + export_output: an `ExportOutput` object such as `ClassificationOutput`, + `RegressionOutput`, or `PredictOutput`. + + Returns: + a list of tensors used in export_output. + + Raises: + ValueError: if `export_output` is not one of `ClassificationOutput`, + `RegressionOutput`, or `PredictOutput`. + """ + if isinstance(export_output, export_output_lib.ClassificationOutput): + return [export_output.scores, export_output.classes] + elif isinstance(export_output, export_output_lib.RegressionOutput): + return [export_output.value] + elif isinstance(export_output, export_output_lib.PredictOutput): + return list(export_output.outputs.values()) + else: + raise ValueError( + '`export_output` must be have type `ClassificationOutput`, ' + '`RegressionOutput`, or `PredictOutput`; got {}.'.format(export_output)) + + +def _clone_export_output_with_tensors(export_output, tensors): + """Clones `export_output` but with new `tensors`. + + Args: + export_output: an `ExportOutput` object such as `ClassificationOutput`, + `RegressionOutput`, or `PredictOutput`. + tensors: a list of `Tensors` used to construct a new `export_output`. + + Returns: + A dict similar to `export_output` but with `tensors`. + + Raises: + ValueError: if `export_output` is not one of `ClassificationOutput`, + `RegressionOutput`, or `PredictOutput`. 
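+
+  Example (illustrative): cloning a `PredictOutput` that has a single output
+  pairs the existing keys with the new tensors in order:
+
+    _clone_export_output_with_tensors(
+        export_output_lib.PredictOutput({'logits': logits}), [tpu_logits])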
+ """ + if isinstance(export_output, export_output_lib.ClassificationOutput): + if len(tensors) != 2: + raise ValueError('tensors must be of length 2; ' + 'got {}.'.format(len(tensors))) + return export_output_lib.ClassificationOutput(*tensors) + elif isinstance(export_output, export_output_lib.RegressionOutput): + if len(tensors) != 1: + raise ValueError('tensors must be of length 1; ' + 'got {}'.format(len(tensors))) + return export_output_lib.RegressionOutput(*tensors) + elif isinstance(export_output, export_output_lib.PredictOutput): + return export_output_lib.PredictOutput( + dict(zip(export_output.outputs.keys(), tensors))) + else: + raise ValueError( + '`export_output` must be have type `ClassificationOutput`, ' + '`RegressionOutput`, or `PredictOutput`; got {}.'.format(export_output)) + + +def _eval_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn): + """Executes `model_fn_wrapper` multiple times on all TPU shards.""" + iterations_per_loop_var = _create_or_get_iterations_per_loop() + + (single_tpu_eval_step, host_calls, captured_scaffold_fn, captured_eval_hooks + ) = model_fn_wrapper.convert_to_single_tpu_eval_step(dequeue_fn) + + @tpu_function.on_device_training_loop + def multi_tpu_eval_steps_on_single_shard(replica_id): + # `tpu.split_compile_and_shard()` splits and passes input for each + # replica as an array. As so, correctly reshape the input to be a + # scalar. + replica_id = tf.reshape(replica_id, []) + with tpu_context._TPUEstimatorReplicaContext(replica_id): # pylint: disable=protected-access + return training_loop.repeat(iterations_per_loop_var, single_tpu_eval_step, + [_ZERO_LOSS]) + + # Add input that represents id for each replica in sync so that + # _TPUEstimatorReplicaContext can be correctly entered during + # replicated computation. + replica_id_inputs = [] + replica_id_inputs.append([tf.constant(i) for i in range(ctx.num_replicas)]) + + ( + compile_op, + loss, + ) = tpu.split_compile_and_shard( + multi_tpu_eval_steps_on_single_shard, + inputs=replica_id_inputs, + num_shards=ctx.num_replicas, + outputs_from_all_shards=False, + device_assignment=ctx.device_assignment) + + loss = loss[0] + return (compile_op, loss, host_calls, captured_scaffold_fn, + captured_eval_hooks.get()) + + +def _train_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn): + """Executes `model_fn_wrapper` multiple times on all TPU shards.""" + iterations_per_loop_var = _create_or_get_iterations_per_loop() + + (single_tpu_train_step, host_call, captured_scaffold_fn, + captured_training_hooks) = ( + model_fn_wrapper.convert_to_single_tpu_train_step(dequeue_fn)) + + @tpu_function.on_device_training_loop + def multi_tpu_train_steps_on_single_shard(replica_id): + # `tpu.split_compile_and_shard()` splits and passes input for each + # replica as an array. As so, correctly reshape the input to be a + # scalar. + replica_id = tf.reshape(replica_id, []) + with tpu_context._TPUEstimatorReplicaContext(replica_id): # pylint: disable=protected-access + outputs = training_loop.while_loop( + lambda i, loss: i < iterations_per_loop_var, + lambda i, loss: [i + 1, single_tpu_train_step(i)], + inputs=[0, _INITIAL_LOSS]) + return outputs[1:] + + # Add input that represents id for each replica in sync so that + # _TPUEstimatorReplicaContext can be correctly entered during + # replicated computation. 
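+  # For example (illustrative), with ctx.num_replicas == 2 this builds
+  # replica_id_inputs == [[tf.constant(0), tf.constant(1)]], and
+  # `split_compile_and_shard` feeds constant i to shard i.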
+ replica_id_inputs = [] + replica_id_inputs.append([tf.constant(i) for i in range(ctx.num_replicas)]) + + (compile_op, loss) = tpu.split_compile_and_shard( + multi_tpu_train_steps_on_single_shard, + inputs=replica_id_inputs, + num_shards=ctx.num_replicas, + outputs_from_all_shards=False, + device_assignment=ctx.device_assignment) + + loss = loss[0] + return (compile_op, loss, host_call, captured_scaffold_fn, + captured_training_hooks.get()) + + +def _predict_on_tpu_system(ctx, model_fn_wrapper, dequeue_fn): + """Executes `model_fn_wrapper` multiple times on all TPU shards.""" + (single_tpu_predict_step, host_calls, captured_scaffold_fn, + captured_predict_hooks + ) = model_fn_wrapper.convert_to_single_tpu_predict_step(dequeue_fn) + + @tpu_function.on_device_training_loop + def multi_tpu_predict_steps_on_single_shard(replica_id): + # `tpu.split_compile_and_shard()` splits and passes input for each + # replica as an array. As so, correctly reshape the input to be a + # scalar. + replica_id = tf.reshape(replica_id, []) + with tpu_context._TPUEstimatorReplicaContext(replica_id): # pylint: disable=protected-access + + def cond(scalar_stopping_signal): + return tf.math.logical_not( + _StopSignals.should_stop(scalar_stopping_signal)) + + inputs = [_StopSignals.NON_STOPPING_SIGNAL] + outputs = training_loop.while_loop( + cond, single_tpu_predict_step, inputs=inputs, name=b'loop') + return outputs + + # Add input that represents id for each replica in sync so that + # _TPUEstimatorReplicaContext can be correctly entered during + # replicated computation. + replica_id_inputs = [] + replica_id_inputs.append([tf.constant(i) for i in range(ctx.num_replicas)]) + ( + compile_op, + dummy_predict_op, + ) = tpu.split_compile_and_shard( + multi_tpu_predict_steps_on_single_shard, + inputs=replica_id_inputs, + num_shards=ctx.num_replicas, + outputs_from_all_shards=False, + device_assignment=ctx.device_assignment) + + dummy_predict_op = dummy_predict_op[0] + return (compile_op, dummy_predict_op, host_calls, captured_scaffold_fn, + captured_predict_hooks.get()) + + +def _wrap_computation_in_while_loop(device, op_fn): + """Wraps the ops generated by `op_fn` in tf.while_loop.""" + + def computation(i): + with tf.control_dependencies(op_fn()): + return i + 1 + + iterations_per_loop_var = _create_or_get_iterations_per_loop() + # By setting parallel_iterations=1, the parallel execution in while_loop is + # basically turned off. + with tf.compat.v1.device(device): + iterations = tf.identity(iterations_per_loop_var) + return tf.compat.v1.while_loop( + lambda i: i < iterations, + computation, [tf.constant(0)], + parallel_iterations=1) + + +def _wrap_computation_in_while_loop_with_stopping_signals(device, op_fn): + """Wraps the ops generated by `op_fn` in tf.while_loop.""" + + def cond(scalar_stopping_signal): + return tf.math.logical_not(_StopSignals.should_stop(scalar_stopping_signal)) + + def computation(unused_scalar_stopping_signal): + return_value = op_fn() + execute_ops = return_value['ops'] + signals = return_value['signals'] + with tf.control_dependencies(execute_ops): + return _StopSignals.as_scalar_stopping_signal(signals) + + # By setting parallel_iterations=1, the parallel execution in while_loop is + # basically turned off. + with tf.compat.v1.device(device): + return tf.compat.v1.while_loop( + cond, + computation, [_StopSignals.NON_STOPPING_SIGNAL], + parallel_iterations=1) + + +def _validate_tpu_training_graph(ctx): + """Validate graph before running distributed training. 
+ + Args: + ctx: A `_InternalTPUContext` instance with mode. + + Raises: + ValueError: If the graph seems invalid for running on device + """ + if control_flow_util.ENABLE_CONTROL_FLOW_V2: + return # b/124241278 + + operations = tf.compat.v1.get_default_graph().get_operations() + + # Check if there is atleast one CrossReplicaSum operation in the graph + # This should be introduced by using the CrossShardOptimizer wrapper + cross_replica_sum_ops = [ + o for o in operations if o.type == _CROSS_REPLICA_SUM_OP + ] + if not cross_replica_sum_ops and ctx.num_replicas > 1: + raise ValueError( + 'CrossShardOptimizer must be used for model training on TPUs.') + + +class _CapturedObject(object): + """A placeholder to capture an object. + + This is useful when we need to capture a Python object in the Tensorflow + control flow body function and use it outside the control flow. + """ + + def __init__(self): + self._object = None + self._captured = False + + def capture(self, o): + if self._captured: + raise RuntimeError( + 'InternalError: Object can capture only once. Please file bug.') + + self._captured = True + self._object = o + + def get(self): + if not self._captured: + raise RuntimeError( + 'InternalError: Object is not captured properly before `get`. ' + 'Please file bug.') + return self._object + + +def _get_scaffold(captured_scaffold_fn): + """Retrieves the Scaffold from `captured_scaffold_fn`.""" + with _CapturingContext(message='Inside scaffold_fn'): + scaffold_fn = captured_scaffold_fn.get() + if scaffold_fn: + scaffold = scaffold_fn() + if scaffold is None: + raise ValueError( + 'TPUEstimatorSpec.scaffold_fn returns None, which is not allowed') + else: + scaffold = None + + if scaffold: + wrapped_finalize = scaffold.finalize + + def _finalize(): + with _CapturingContext('Inside Scaffold.finalize'): + wrapped_finalize() + + scaffold.finalize = _finalize + return scaffold + + +class _CapturingContext(control_flow_ops.ControlFlowContext): + """Tracks references to Tensors defined in TPU replication.""" + + def __init__(self, message): + control_flow_ops.ControlFlowContext.__init__(self) + self._message = message + + def to_control_flow_context_def(self, context_def, export_scope=None): + # pylint: disable=useless-super-delegation + # NOTE(slebedev): the method is required by `ControlFlowContext`. + super(_CapturingContext, + self).to_control_flow_context_def(context_def, export_scope) + + def AddOp(self, op): # pylint: disable=invalid-name + for c in op.inputs: + if tpu._TPU_REPLICATE_ATTR in c.op.node_def.attr: # pylint: disable=protected-access + raise ValueError('{}: Op {} depends on TPU computation {}, ' + 'which is not allowed.'.format(self._message, op, c)) + + def AddValue(self, value): + self.AddOp(value.op) + return value + + def __enter__(self): + # pylint: disable=protected-access + self._g = tf.compat.v1.get_default_graph() + self._old = self._g._get_control_flow_context() + self._g._set_control_flow_context(self) + # pylint: enable=protected-access + + def __exit__(self, _, __, ___): # pylint: disable=invalid-name + self._g._set_control_flow_context(self._old) # pylint: disable=protected-access + + +class _Inputs(object): + """A data structure representing the input_fn returned values. + + This also supports the returned value from input_fn as `Dataset`. 
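+
+  A minimal usage sketch (illustrative only):
+
+    inputs = _Inputs.from_input_fn(input_fn(params))
+    if inputs.is_dataset:
+      init_op = inputs.dataset_initializer()  # Run before reading data.
+    features, labels = inputs.features_and_labels()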
+ """ + + def __init__(self, features=None, labels=None, dataset=None, signals=None): + if dataset is not None and (features is not None or labels is not None or + signals is not None): + raise RuntimeError('Internal Error: Either (features and labels) or ' + 'dataset should be provided, not both. Please file ' + 'bug') + + self._features = features + self._labels = labels + self._signals = signals + + self._dataset = dataset + self._iterator = None + + @staticmethod + def from_input_fn(return_values): + """Returns an `_Inputs` instance according to `input_fn` return value.""" + if isinstance(return_values, tf.compat.v2.data.Dataset): + dataset = return_values + return _Inputs(dataset=dataset) + + features, labels = _Inputs._parse_inputs(return_values) + return _Inputs(features, labels) + + @staticmethod + def _parse_inputs(return_values): + if isinstance(return_values, tuple): + features, labels = return_values + else: + features, labels = return_values, None + return features, labels + + @property + def is_dataset(self): + """Returns True if the return value from input_fn is Dataset.""" + return self._dataset is not None + + def dataset_initializer(self): + """Returns the dataset's initializer. + + The initializer must be run before calling `features_and_labels`. + """ + self._iterator = tf.compat.v1.data.make_initializable_iterator( + self._dataset) + return self._iterator.initializer + + def features_and_labels(self): + """Gets `features` and `labels`.""" + if self.is_dataset: + if self._iterator is None: + raise RuntimeError('Internal error: Must run dataset_initializer ' + 'before calling features_and_labels(). Please file ' + 'a bug!') + return _Inputs._parse_inputs(self._iterator.get_next()) + + return (self._features, self._labels) + + def signals(self): + return self._signals + + @property + def dataset(self): + return self._dataset + + +class _InputsWithStoppingSignals(_Inputs): + """Inputs with `_StopSignals` inserted into the dataset.""" + + def __init__(self, + dataset, + batch_size, + add_padding=False, + num_invocations_per_step=1): + + assert dataset is not None + user_provided_dataset = dataset.map( + _InputsWithStoppingSignals.insert_stopping_signal( + stop=False, batch_size=batch_size, add_padding=add_padding)) + if num_invocations_per_step == 1: + final_batch_dataset = dataset.take(1).map( + _InputsWithStoppingSignals.insert_stopping_signal( + stop=True, batch_size=batch_size, add_padding=add_padding)) + else: + # We append (2 * num_invocations_per_step - 1) batches for exhausting the + # user_provided_dataset and stop properly. + # For example, if num_invocations_per_step is 2, we append 3 additional + # padding batches: b1, b2, b3. + # If user_provided_dataset contains two batches: a1, a2 + # Step 1: [a1, a2] + # Step 2: [b1, b2] -> STOP + # If user_provided_dataset contains three batches: a1, a2, a3. + # The training loops: + # Step 1: [a1, a2] + # Step 2: [a3, b1] + # Step 3: [b2, b3] -> STOP. + final_batch_dataset = dataset.take(1).map( + _InputsWithStoppingSignals.insert_stopping_signal( + stop=True, batch_size=batch_size, add_padding=add_padding)) + final_batch_dataset = final_batch_dataset.repeat( + 2 * num_invocations_per_step - 1) + + def _set_mask(data_dict): + signals = data_dict['signals'] + signals['padding_mask'] = tf.compat.v1.ones_like( + signals['padding_mask']) + data_dict['signals'] = signals + return data_dict + + # Mask out the extra batch. 
+ final_batch_dataset = final_batch_dataset.map(_set_mask) + + dataset = user_provided_dataset.concatenate(final_batch_dataset).prefetch(2) + + super(_InputsWithStoppingSignals, self).__init__(dataset=dataset) + self._current_inputs = None + + def features_and_labels(self): + if self._current_inputs is not None: + raise RuntimeError( + 'Internal Error: The previous inputs have not been properly ' + 'consumed. First call features_and_labels, then call signals.') + + inputs_with_signals = self._iterator.get_next() + features = inputs_with_signals['features'] + labels = inputs_with_signals.get('labels') + + self._current_inputs = inputs_with_signals + return features, labels + + def signals(self): + """Returns the `Signals` from `_Inputs`.""" + if self._current_inputs is None: + raise RuntimeError( + 'Internal Error: The current inputs have not been properly ' + 'generated. First call features_and_labels, then call signals.') + signals = self._current_inputs['signals'] + self._current_inputs = None + return signals + + @staticmethod + def insert_stopping_signal(stop, batch_size, add_padding=False): + """Inserts stopping_signal into dataset via _map_fn. + + Here we change the data structure in the dataset, such that the return value + is a dictionary now and `features`, `labels`, and `signals` are three + distinguished keys in that dict. This provides a better structure, which + eases the process to decompose the inputs (see `features_and_labels`). + + Args: + stop: bool, state of current stopping signals. + batch_size: int, batch size. + add_padding: bool, whether to pad the tensor to full batch size. + + Returns: + A map_fn passed to dataset.map API. + """ + + def _map_fn(*args): + """The map fn to insert signals.""" + if len(args) == 1: + # Unpack the single Tensor/dict argument as features. This is required + # for the input_fn returns no labels. + args = args[0] + features, labels = _Inputs._parse_inputs(args) + new_input_dict = {} + + if add_padding: + padding_mask, features, labels = ( + _PaddingSignals.pad_features_and_labels(features, labels, + batch_size)) + + new_input_dict['features'] = features + if labels is not None: + new_input_dict['labels'] = labels + + else: + new_input_dict['features'] = features + if labels is not None: + new_input_dict['labels'] = labels + padding_mask = None + + new_input_dict['signals'] = _StopSignals( + stop=stop, batch_size=batch_size, + padding_mask=padding_mask).as_dict() + + return new_input_dict + + return _map_fn + + +class _StopSignals(object): + """Signals class holding all logic to handle TPU stopping condition.""" + + NON_STOPPING_SIGNAL = False + STOPPING_SIGNAL = True + + def __init__(self, stop, batch_size, padding_mask=None): + self._stop = stop + self._batch_size = batch_size + self._padding_mask = padding_mask + + def as_dict(self): + """Returns the signals as Python dict.""" + shape = [self._batch_size, 1] + dtype = tf.dtypes.bool + + if self._stop: + stopping = tf.ones(shape=shape, dtype=dtype) + else: + stopping = tf.zeros(shape=shape, dtype=dtype) + + signals = {'stopping': stopping} + if self._padding_mask is not None: + signals['padding_mask'] = self._padding_mask + return signals + + @staticmethod + def as_scalar_stopping_signal(signals): + return tf.identity(signals['stopping'][0][0]) + + @staticmethod + def should_stop(scalar_stopping_signal): + """Detects whether scalar_stopping_signal indicates stopping.""" + if isinstance(scalar_stopping_signal, tf.Tensor): + # STOPPING_SIGNAL is a constant True. 
Here, the logical_and is just the TF + # way to express the bool check whether scalar_stopping_signal is True. + return tf.math.logical_and(scalar_stopping_signal, + _StopSignals.STOPPING_SIGNAL) + else: + # For non Tensor case, it is used in SessionRunHook. So, we cannot modify + # the graph anymore. Here, we use pure Python. + return bool(scalar_stopping_signal) + + +class _PaddingSignals(object): + """Signals class holding all logic to handle padding.""" + + @staticmethod + def pad_features_and_labels(features, labels, batch_size): + """Pads out the batch dimension of features and labels.""" + real_batch_size = tf.compat.v1.shape( + _PaddingSignals._find_any_tensor(features))[0] + + batch_size_tensor = tf.constant(batch_size, tf.dtypes.int32) + + check_greater = tf.compat.v1.debugging.assert_greater_equal( + batch_size_tensor, + real_batch_size, + data=(batch_size_tensor, real_batch_size), + message='The real batch size should not be greater than batch_size.') + + with tf.control_dependencies([check_greater]): + missing_count = batch_size_tensor - real_batch_size + + def pad_single_tensor(tensor): + """Pads out the batch dimension of a tensor to the complete batch_size.""" + rank = len(tensor.shape) + assert rank > 0 + padding = tf.stack([[0, missing_count]] + [[0, 0]] * (rank - 1)) + padded_shape = (batch_size,) + tuple(tensor.shape[1:]) + padded_tensor = tf.compat.v1.pad(tensor, padding) + padded_tensor.set_shape(padded_shape) + return padded_tensor + + def nest_pad(tensor_or_dict): + return tf.nest.map_structure(pad_single_tensor, tensor_or_dict) + + features = nest_pad(features) + if labels is not None: + labels = nest_pad(labels) + + padding_mask = _PaddingSignals._padding_mask(real_batch_size, missing_count, + batch_size) + + return padding_mask, features, labels + + @staticmethod + def slice_tensor_or_dict(tensor_or_dict, signals): + """Slice the real Tensors according to padding mask in signals.""" + + padding_mask = signals['padding_mask'] + batch_size = tf.compat.v1.shape(padding_mask)[0] + + def verify_batch_size(tensor): + check_batch_size = tf.math.equal(batch_size, tensor.shape[0]) + with tf.control_dependencies([check_batch_size]): + return tf.identity(tensor) + + def slice_single_tensor(tensor): + rank = len(tensor.shape) + assert rank > 0 + real_batch_size = batch_size - tf.math.reduce_sum(padding_mask) + return verify_batch_size(tensor)[0:real_batch_size] + + # As we split the Tensors to all TPU cores and concat them back, it is + # important to ensure the real data is placed before padded ones, i.e., + # order is preserved. By that, the sliced padding mask should have all 0's. + # If this assertion failed, # the slice logic here would not hold. + sliced_padding_mask = slice_single_tensor(padding_mask) + assert_padding_mask = tf.math.equal( + tf.math.reduce_sum(sliced_padding_mask), 0) + + with tf.control_dependencies([assert_padding_mask]): + should_stop = _StopSignals.should_stop( + _StopSignals.as_scalar_stopping_signal(signals)) + + is_full_batch = tf.math.equal(tf.math.reduce_sum(padding_mask), 0) + + def slice_fn(tensor): + # If the current batch is full batch or part of stopping signals, we do + # not need to slice to save performance. 
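+      # The predicate is evaluated at run time: when stopping, or when the batch
+      # contains no padding, the tensor is returned as-is; otherwise the padded
+      # tail is sliced off.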
+ return tf.compat.v1.cond( + tf.math.logical_or(should_stop, is_full_batch), + (lambda: verify_batch_size(tensor)), + (lambda: slice_single_tensor(tensor))) + + return tf.nest.map_structure(slice_fn, tensor_or_dict) + + @staticmethod + def _find_any_tensor(batch_features): + tensors = [ + x for x in tf.nest.flatten(batch_features) if isinstance(x, tf.Tensor) + ] + if not tensors: + raise ValueError('Cannot find any Tensor in features dict.') + return tensors[0] + + @staticmethod + def _padding_mask(real_batch_size, missing_count, batch_size): + padding_mask = tf.concat([ + tf.zeros((real_batch_size,), dtype=tf.dtypes.int32), + tf.ones((missing_count,), dtype=tf.dtypes.int32) + ], + axis=0) + padding_mask.set_shape((batch_size,)) + return padding_mask + + +def _verify_cross_hosts_transfer_size(tensor_dict, message): + total_size = 0 + tensor_structure = {} + for key, tensor in tensor_dict.items(): + shape = tensor.shape + size = np.product(shape) * tensor.dtype.size + tensor_structure[key] = shape + total_size += size + if total_size >= _ONE_GIGABYTE: + raise ValueError( + '{} The transfer size is larger than the protobuf limit. Please ' + 'consider to use Tensors with smaller shapes or reduce batch ' + 'size. Given:\n' + '{}'.format( + message, '\n'.join([ + ' -- Key: {}, Shape: {}'.format(k, v) + for k, v in tensor_structure.items() + ]))) + + +def _add_item_to_params(params, key, value): + """Adds a new item into `params`.""" + if hasattr(params, 'set_hparam'): + # For HParams, we need to use special API. + if key in params: + params.set_hparam(key, value) + else: + params.add_hparam(key, value) + else: + # Now params is Python dict. + params[key] = value + + +def export_estimator_savedmodel(estimator, + export_dir_base, + serving_input_receiver_fn, + assets_extra=None, + as_text=False, + checkpoint_path=None): + """Export `Estimator` trained model for TPU inference. + + Args: + estimator: `Estimator` with which model has been trained. + export_dir_base: A string containing a directory in which to create + timestamped subdirectories containing exported SavedModels. + serving_input_receiver_fn: A function that takes no argument and returns a + `ServingInputReceiver` or `TensorServingInputReceiver`. + assets_extra: A dict specifying how to populate the assets.extra directory + within the exported SavedModel, or `None` if no extra assets are needed. + as_text: whether to write the SavedModel proto in text format. + checkpoint_path: The checkpoint path to export. If `None` (the default), + the most recent checkpoint found within the model directory is chosen. + + Returns: + The string path to the exported directory. + """ + # `TPUEstimator` requires `tpu_config.RunConfig`, so we cannot use + # `estimator.config`. + config = tpu_config.RunConfig(model_dir=estimator.model_dir) + est = TPUEstimator( + estimator._model_fn, # pylint: disable=protected-access + config=config, + params=estimator.params, + use_tpu=True, + train_batch_size=2048, # Does not matter. + eval_batch_size=2048, # Does not matter. + ) + return est.export_saved_model(export_dir_base, serving_input_receiver_fn, + assets_extra, as_text, checkpoint_path) + + +def model_fn_inference_on_tpu(model_fn, + features, + labels=None, + config=None, + params=None, + batch_config=None): + """Convenience wrapper for export_saved_model API v2 for a model_fn. + WARNING:THIS METHOD IS DEPRECATED AND NOT PART OF THE APIS. 
+ + Make sure to set + `export_saved_model_api_version=tpu_estimator.ExportSavedModelApiVersion.V2` + when initializing TPUEstimator (default API version is V1). This is because + 1) `tpu.rewrite` (or `tpu.compile`) shouldn't be called in a nested way + (otherwise validation will throw error like + "NotImplementedError: tpu_shard_context cannot be nested.") + 2) When using V1 API, Estimator calls `tpu.rewrite` so + using `model_fn_inference_on_tpu` will trigger a nested call. + When using V2 API, users of Estimator needs to call `tpu.rewrite` (which + the wrapper does). + + It attempts to execute the entire model function on the TPU for prediction. + Note that this does not support features which are SparseTensors. If you have + SparseTensor features, consider partitioning your model function further and + use inference_on_tpu. + + Args: + model_fn: the model_fn for which we want to inference on TPU. + features: a tensor or dict of tensors, serves as the feature inputs to the + model. + labels: a tensor or dict of tensors, serves as the labels inputs to the + model. + config: auxiliary config to the Estimator. + params: hparams that we want to pass to the model_fn. + batch_config: a named tuple to wrap the inference batching configuration + inputs. + + Returns: + An EstimatorSpec containing the outputs in export_outputs and predictions. + """ + computation, capture = _build_computation_for_inference( + model_fn, labels, config, params) + tensors = call_computation(features, computation, batch_config=batch_config) + estimator_spec, export_outputs_dict, predictions_dict, none_indices = ( + capture.get()) + predictions_list = tensors[:len(predictions_dict)] + export_outputs_list_without_none = tensors[len(predictions_dict):] + + # Reinsert `None`s which we've taken out in + # `_build_computation_for_inference()`. + export_outputs_list = [] + while none_indices or export_outputs_list_without_none: + if none_indices and none_indices[0] == len(export_outputs_list): + export_outputs_list.append(None) + none_indices.pop(0) + else: + export_outputs_list.append(export_outputs_list_without_none.pop(0)) + + # Reconstruct `export_outputs` with updated tensors. + new_export_outputs_dict = tf.nest.pack_sequence_as(export_outputs_dict, + export_outputs_list) + export_outputs = estimator_spec.export_outputs + new_export_outputs = collections.OrderedDict( + (k, _clone_export_output_with_tensors(export_outputs[k], v)) + for k, v in six.iteritems(new_export_outputs_dict)) + # Reconstruct `predictions` with updated tensors. 
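+  # If the original `predictions` was a single Tensor rather than a dict, it was
+  # wrapped under _KEY_WHEN_PREDICTIONS_IS_A_TENSOR inside the TPU computation,
+  # so unwrap that singleton case here.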
+ new_predictions = tf.nest.pack_sequence_as(predictions_dict, predictions_list) + if (len(new_predictions) == 1 and + _KEY_WHEN_PREDICTIONS_IS_A_TENSOR in new_predictions): + new_predictions = new_predictions[_KEY_WHEN_PREDICTIONS_IS_A_TENSOR] + + return estimator_spec._replace( + export_outputs=new_export_outputs, predictions=new_predictions) + + +def _build_computation_for_inference(model_fn, labels, config, params): + """Builds the computation with calls the model_fn for inference.""" + capture = _CapturedObject() + + def computation(computation_input): + """Computation to be passed to `TPUPartitionedCall()`.""" + tpu_computation, tpu_capture = _build_tpu_computation_for_inference( + model_fn, computation_input, labels, config, params) + + tensors_on_cpu = tf.compat.v1.tpu.rewrite(tpu_computation) + tpu.prune_unconnected_ops_from_xla(tf.compat.v1.get_default_graph()) + + (estimator_spec, export_outputs_dict, export_outputs_list, + predictions_dict) = ( + tpu_capture.get()) + predictions_list = tensors_on_cpu[:len(predictions_dict)] + export_outputs_tpu_on_cpu_list = tensors_on_cpu[len(predictions_dict):] + + # Reconstruct tensors used in export_outputs, with TPU tensors replaced + # with their CPU counterpart returned from `rewrite_for_inference()`. + # `function.Defun()` does not like `None`s in return values, so we leave + # `None`s out but record their positions for later reconstruction. + export_outputs_list_without_none = [] + none_indices = [] + for i, t in enumerate(export_outputs_list): + if t is None: + none_indices.append(i) + else: + export_outputs_list_without_none.append( + export_outputs_tpu_on_cpu_list.pop(0)) + + capture.capture( + (estimator_spec, export_outputs_dict, predictions_dict, none_indices)) + return predictions_list + export_outputs_list_without_none + + return computation, capture + + +def _build_tpu_computation_for_inference(model_fn, features, labels, config, + params): + """Builds the TPU computation for inference on TPU.""" + capture = _CapturedObject() + + def computation(): + """Compute tpu tensors used in export_outputs. + + Passed to rewrite_for_inference so that model_fn will be called under + the rewriting contexts. Only tpu tensors are returned, but export_outputs + and scaffold are captured. + + Returns: + A list of Tensors used in export_outputs and not marked for + outside_compilation. + """ + # We should only call model fn once and it should be inside `computation` + # so that building the graph will happen under `rewrite_for_inference`. + + model_fn_args = function_utils.fn_args(model_fn) + kwargs = {} + # Makes deep copy with `config` and params` in case user mutates them. + if 'labels' in model_fn_args: + kwargs['labels'] = labels + if 'mode' in model_fn_args: + kwargs['mode'] = model_fn_lib.ModeKeys.PREDICT + if 'config' in model_fn_args: + kwargs['config'] = config + if 'params' in model_fn_args: + kwargs['params'] = params + estimator_spec = model_fn(features, **kwargs) + + # We pick the TPU tensors out from `export_output` and later return them + # from `computation` for rewriting. 
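+    # The return order below (predictions first, then export-output tensors)
+    # must match how _build_computation_for_inference() splits the rewritten
+    # outputs at len(predictions_dict).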
+ export_outputs_dict = collections.OrderedDict( + (k, _export_output_to_tensors(v)) + for k, v in six.iteritems(estimator_spec.export_outputs)) + export_outputs_list = tf.nest.flatten(export_outputs_dict) + export_outputs_tpu_list = [t for t in export_outputs_list if t is not None] + + if isinstance(estimator_spec.predictions, dict): + predictions_dict = collections.OrderedDict( + (k, v) for k, v in six.iteritems(estimator_spec.predictions)) + else: + predictions_dict = { + _KEY_WHEN_PREDICTIONS_IS_A_TENSOR: estimator_spec.predictions + } + predictions_list = tf.nest.flatten(predictions_dict) + + # We cannot return everything we want through the return values, so + # capture the rest here for later use. + capture.capture((estimator_spec, export_outputs_dict, export_outputs_list, + predictions_dict)) + return predictions_list + export_outputs_tpu_list + + return computation, capture + + +def inference_on_tpu(computation, + inputs_to_tpu, + num_batch_threads, + max_batch_size, + batch_timeout_micros, + allowed_batch_sizes=None, + max_enqueued_batches=100): + """Convenient wrapper for export_saved_model API v2 to wrap TPU computation. + + WARNING: THIS METHOD IS DEPRECATED AND NOT PART OF THE APIS. + + Make sure to set + `export_saved_model_api_version=tpu_estimator.ExportSavedModelApiVersion.V2` + when initializing TPUEstimator (default API version is V1). This is because + 1) `tpu.rewrite` (or `tpu.compile`) shouldn't be called in a nested way + (otherwise validation will throw error like + "NotImplementedError: tpu_shard_context cannot be nested.") + 2) When using V1 API, Estimator calls `tpu.rewrite` so + using `model_fn_inference_on_tpu` will trigger a nested call. + When using V2 API, users of Estimator needs to call `tpu.rewrite` (which + the wrapper does). + + It puts computation on TPU, add batching around it and round robin computation + between TPU cores. + + See tpu_estimator_test.py for an example. + + Args: + computation: computation to be put on TPU, which takes inputs_to_tpu as + arguments. + inputs_to_tpu: a list of tensors as input to computation. + num_batch_threads: Number of scheduling threads for processing batches of + work. Determines the number of batches processed in parallel. + max_batch_size: Batch sizes will never be bigger than this. If None or 0, + no batching will done. + batch_timeout_micros: Maximum number of microseconds to wait before + outputting an incomplete batch. + allowed_batch_sizes: Optional list of allowed batch sizes. If left empty, + does nothing. Otherwise, supplies a list of batch sizes, causing the op to + pad batches up to one of those sizes. The entries must increase + monotonically, and the final entry must equal max_batch_size. + max_enqueued_batches: The maximum depth of the batch queue. Defaults to 100. + + Returns: + The unbatched computation output Tensors. 
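+
+  Note: the inner `tpu_computation` `Defun` takes no explicit arguments; it
+  closes over `args`, and its resulting `captured_inputs` are what gets
+  forwarded to `TPUPartitionedCall`.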
+ """ + + def _tpu_call(args): + """Function to either call or feed into BatchFunction.""" + + @function.Defun(capture_resource_var_by_value=False) + def tpu_computation(): + """Function to feed into the TPUPartitionedCallOp.""" + tensors_on_cpu = tf.compat.v1.tpu.rewrite(computation, args) + tpu.prune_unconnected_ops_from_xla(tf.compat.v1.get_default_graph()) + return tensors_on_cpu + + return tpu_functional.TPUPartitionedCall( + args=tpu_computation.captured_inputs, + device_ordinal=tpu_ops.tpu_ordinal_selector(), + Tout=[o.type for o in tpu_computation.definition.signature.output_arg], + f=tpu_computation) + + if not max_batch_size: + return _tpu_call(inputs_to_tpu) + + @tf.nondifferentiable_batch_function(num_batch_threads, max_batch_size, + batch_timeout_micros, + allowed_batch_sizes, + max_enqueued_batches) + def batched_tpu_computation(*args): + """Function to feed into the BatchOp.""" + return _tpu_call(args) + + return batched_tpu_computation(*inputs_to_tpu) diff --git a/exp-4-bert-squad/tokenization.py b/exp-4-bert-squad/tokenization.py new file mode 100644 index 0000000..52c92ad --- /dev/null +++ b/exp-4-bert-squad/tokenization.py @@ -0,0 +1,399 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tokenization classes.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import re +import unicodedata +import six +import tensorflow as tf + + +def validate_case_matches_checkpoint(do_lower_case, init_checkpoint): + """Checks whether the casing config is consistent with the checkpoint name.""" + + # The casing has to be passed in by the user and there is no explicit check + # as to whether it matches the checkpoint. The casing information probably + # should have been stored in the bert_config.json file, but it's not, so + # we have to heuristically detect it to validate. + + if not init_checkpoint: + return + + m = re.match("^.*?([A-Za-z0-9_-]+)/bert_model.ckpt", init_checkpoint) + if m is None: + return + + model_name = m.group(1) + + lower_models = [ + "uncased_L-24_H-1024_A-16", "uncased_L-12_H-768_A-12", + "multilingual_L-12_H-768_A-12", "chinese_L-12_H-768_A-12" + ] + + cased_models = [ + "cased_L-12_H-768_A-12", "cased_L-24_H-1024_A-16", + "multi_cased_L-12_H-768_A-12" + ] + + is_bad_config = False + if model_name in lower_models and not do_lower_case: + is_bad_config = True + actual_flag = "False" + case_name = "lowercased" + opposite_flag = "True" + + if model_name in cased_models and do_lower_case: + is_bad_config = True + actual_flag = "True" + case_name = "cased" + opposite_flag = "False" + + if is_bad_config: + raise ValueError( + "You passed in `--do_lower_case=%s` with `--init_checkpoint=%s`. " + "However, `%s` seems to be a %s model, so you " + "should pass in `--do_lower_case=%s` so that the fine-tuning matches " + "how the model was pre-training. If this error is wrong, please " + "just comment out this check." 
% (actual_flag, init_checkpoint, + model_name, case_name, opposite_flag)) + + +def convert_to_unicode(text): + """Converts `text` to Unicode (if it's not already), assuming utf-8 input.""" + if six.PY3: + if isinstance(text, str): + return text + elif isinstance(text, bytes): + return text.decode("utf-8", "ignore") + else: + raise ValueError("Unsupported string type: %s" % (type(text))) + elif six.PY2: + if isinstance(text, str): + return text.decode("utf-8", "ignore") + elif isinstance(text, unicode): + return text + else: + raise ValueError("Unsupported string type: %s" % (type(text))) + else: + raise ValueError("Not running on Python2 or Python 3?") + + +def printable_text(text): + """Returns text encoded in a way suitable for print or `tf.logging`.""" + + # These functions want `str` for both Python2 and Python3, but in one case + # it's a Unicode string and in the other it's a byte string. + if six.PY3: + if isinstance(text, str): + return text + elif isinstance(text, bytes): + return text.decode("utf-8", "ignore") + else: + raise ValueError("Unsupported string type: %s" % (type(text))) + elif six.PY2: + if isinstance(text, str): + return text + elif isinstance(text, unicode): + return text.encode("utf-8") + else: + raise ValueError("Unsupported string type: %s" % (type(text))) + else: + raise ValueError("Not running on Python2 or Python 3?") + + +def load_vocab(vocab_file): + """Loads a vocabulary file into a dictionary.""" + vocab = collections.OrderedDict() + index = 0 + with tf.io.gfile.GFile(vocab_file, "r") as reader: + while True: + token = convert_to_unicode(reader.readline()) + if not token: + break + token = token.strip() + vocab[token] = index + index += 1 + return vocab + + +def convert_by_vocab(vocab, items): + """Converts a sequence of [tokens|ids] using the vocab.""" + output = [] + for item in items: + output.append(vocab[item]) + return output + + +def convert_tokens_to_ids(vocab, tokens): + return convert_by_vocab(vocab, tokens) + + +def convert_ids_to_tokens(inv_vocab, ids): + return convert_by_vocab(inv_vocab, ids) + + +def whitespace_tokenize(text): + """Runs basic whitespace cleaning and splitting on a piece of text.""" + text = text.strip() + if not text: + return [] + tokens = text.split() + return tokens + + +class FullTokenizer(object): + """Runs end-to-end tokenziation.""" + + def __init__(self, vocab_file, do_lower_case=True): + self.vocab = load_vocab(vocab_file) + self.inv_vocab = {v: k for k, v in self.vocab.items()} + self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case) + self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab) + + def tokenize(self, text): + split_tokens = [] + for token in self.basic_tokenizer.tokenize(text): + for sub_token in self.wordpiece_tokenizer.tokenize(token): + split_tokens.append(sub_token) + + return split_tokens + + def convert_tokens_to_ids(self, tokens): + return convert_by_vocab(self.vocab, tokens) + + def convert_ids_to_tokens(self, ids): + return convert_by_vocab(self.inv_vocab, ids) + + +class BasicTokenizer(object): + """Runs basic tokenization (punctuation splitting, lower casing, etc.).""" + + def __init__(self, do_lower_case=True): + """Constructs a BasicTokenizer. + + Args: + do_lower_case: Whether to lower case the input. + """ + self.do_lower_case = do_lower_case + + def tokenize(self, text): + """Tokenizes a piece of text.""" + text = convert_to_unicode(text) + text = self._clean_text(text) + + # This was added on November 1st, 2018 for the multilingual and Chinese + # models. 
This is also applied to the English models now, but it doesn't + # matter since the English models were not trained on any Chinese data + # and generally don't have any Chinese data in them (there are Chinese + # characters in the vocabulary because Wikipedia does have some Chinese + # words in the English Wikipedia.). + text = self._tokenize_chinese_chars(text) + + orig_tokens = whitespace_tokenize(text) + split_tokens = [] + for token in orig_tokens: + if self.do_lower_case: + token = token.lower() + token = self._run_strip_accents(token) + split_tokens.extend(self._run_split_on_punc(token)) + + output_tokens = whitespace_tokenize(" ".join(split_tokens)) + return output_tokens + + def _run_strip_accents(self, text): + """Strips accents from a piece of text.""" + text = unicodedata.normalize("NFD", text) + output = [] + for char in text: + cat = unicodedata.category(char) + if cat == "Mn": + continue + output.append(char) + return "".join(output) + + def _run_split_on_punc(self, text): + """Splits punctuation on a piece of text.""" + chars = list(text) + i = 0 + start_new_word = True + output = [] + while i < len(chars): + char = chars[i] + if _is_punctuation(char): + output.append([char]) + start_new_word = True + else: + if start_new_word: + output.append([]) + start_new_word = False + output[-1].append(char) + i += 1 + + return ["".join(x) for x in output] + + def _tokenize_chinese_chars(self, text): + """Adds whitespace around any CJK character.""" + output = [] + for char in text: + cp = ord(char) + if self._is_chinese_char(cp): + output.append(" ") + output.append(char) + output.append(" ") + else: + output.append(char) + return "".join(output) + + def _is_chinese_char(self, cp): + """Checks whether CP is the codepoint of a CJK character.""" + # This defines a "chinese character" as anything in the CJK Unicode block: + # https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) + # + # Note that the CJK Unicode block is NOT all Japanese and Korean characters, + # despite its name. The modern Korean Hangul alphabet is a different block, + # as is Japanese Hiragana and Katakana. Those alphabets are used to write + # space-separated words, so they are not treated specially and handled + # like the all of the other languages. + if ((cp >= 0x4E00 and cp <= 0x9FFF) or # + (cp >= 0x3400 and cp <= 0x4DBF) or # + (cp >= 0x20000 and cp <= 0x2A6DF) or # + (cp >= 0x2A700 and cp <= 0x2B73F) or # + (cp >= 0x2B740 and cp <= 0x2B81F) or # + (cp >= 0x2B820 and cp <= 0x2CEAF) or + (cp >= 0xF900 and cp <= 0xFAFF) or # + (cp >= 0x2F800 and cp <= 0x2FA1F)): # + return True + + return False + + def _clean_text(self, text): + """Performs invalid character removal and whitespace cleanup on text.""" + output = [] + for char in text: + cp = ord(char) + if cp == 0 or cp == 0xfffd or _is_control(char): + continue + if _is_whitespace(char): + output.append(" ") + else: + output.append(char) + return "".join(output) + + +class WordpieceTokenizer(object): + """Runs WordPiece tokenziation.""" + + def __init__(self, vocab, unk_token="[UNK]", max_input_chars_per_word=200): + self.vocab = vocab + self.unk_token = unk_token + self.max_input_chars_per_word = max_input_chars_per_word + + def tokenize(self, text): + """Tokenizes a piece of text into its word pieces. + + This uses a greedy longest-match-first algorithm to perform tokenization + using the given vocabulary. + + For example: + input = "unaffable" + output = ["un", "##aff", "##able"] + + Args: + text: A single token or whitespace separated tokens. 
This should have + already been passed through `BasicTokenizer. + + Returns: + A list of wordpiece tokens. + """ + + text = convert_to_unicode(text) + + output_tokens = [] + for token in whitespace_tokenize(text): + chars = list(token) + if len(chars) > self.max_input_chars_per_word: + output_tokens.append(self.unk_token) + continue + + is_bad = False + start = 0 + sub_tokens = [] + while start < len(chars): + end = len(chars) + cur_substr = None + while start < end: + substr = "".join(chars[start:end]) + if start > 0: + substr = "##" + substr + if substr in self.vocab: + cur_substr = substr + break + end -= 1 + if cur_substr is None: + is_bad = True + break + sub_tokens.append(cur_substr) + start = end + + if is_bad: + output_tokens.append(self.unk_token) + else: + output_tokens.extend(sub_tokens) + return output_tokens + + +def _is_whitespace(char): + """Checks whether `chars` is a whitespace character.""" + # \t, \n, and \r are technically contorl characters but we treat them + # as whitespace since they are generally considered as such. + if char == " " or char == "\t" or char == "\n" or char == "\r": + return True + cat = unicodedata.category(char) + if cat == "Zs": + return True + return False + + +def _is_control(char): + """Checks whether `chars` is a control character.""" + # These are technically control characters but we count them as whitespace + # characters. + if char == "\t" or char == "\n" or char == "\r": + return False + cat = unicodedata.category(char) + if cat in ("Cc", "Cf"): + return True + return False + + +def _is_punctuation(char): + """Checks whether `chars` is a punctuation character.""" + cp = ord(char) + # We treat all non-letter/number ASCII as punctuation. + # Characters such as "^", "$", and "`" are not in the Unicode + # Punctuation class but we treat them as punctuation anyways, for + # consistency. + if ((cp >= 33 and cp <= 47) or (cp >= 58 and cp <= 64) or + (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126)): + return True + cat = unicodedata.category(char) + if cat.startswith("P"): + return True + return False diff --git a/exp-4-bert-squad/tokenization_test.py b/exp-4-bert-squad/tokenization_test.py new file mode 100644 index 0000000..0afaedd --- /dev/null +++ b/exp-4-bert-squad/tokenization_test.py @@ -0,0 +1,137 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
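+"""Unit tests for the tokenization classes in tokenization.py."""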
+from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import tempfile +import tokenization +import six +import tensorflow as tf + + +class TokenizationTest(tf.test.TestCase): + + def test_full_tokenizer(self): + vocab_tokens = [ + "[UNK]", "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", + "##ing", "," + ] + with tempfile.NamedTemporaryFile(delete=False) as vocab_writer: + if six.PY2: + vocab_writer.write("".join([x + "\n" for x in vocab_tokens])) + else: + vocab_writer.write("".join( + [x + "\n" for x in vocab_tokens]).encode("utf-8")) + + vocab_file = vocab_writer.name + + tokenizer = tokenization.FullTokenizer(vocab_file) + os.unlink(vocab_file) + + tokens = tokenizer.tokenize(u"UNwant\u00E9d,running") + self.assertAllEqual(tokens, ["un", "##want", "##ed", ",", "runn", "##ing"]) + + self.assertAllEqual( + tokenizer.convert_tokens_to_ids(tokens), [7, 4, 5, 10, 8, 9]) + + def test_chinese(self): + tokenizer = tokenization.BasicTokenizer() + + self.assertAllEqual( + tokenizer.tokenize(u"ah\u535A\u63A8zz"), + [u"ah", u"\u535A", u"\u63A8", u"zz"]) + + def test_basic_tokenizer_lower(self): + tokenizer = tokenization.BasicTokenizer(do_lower_case=True) + + self.assertAllEqual( + tokenizer.tokenize(u" \tHeLLo!how \n Are yoU? "), + ["hello", "!", "how", "are", "you", "?"]) + self.assertAllEqual(tokenizer.tokenize(u"H\u00E9llo"), ["hello"]) + + def test_basic_tokenizer_no_lower(self): + tokenizer = tokenization.BasicTokenizer(do_lower_case=False) + + self.assertAllEqual( + tokenizer.tokenize(u" \tHeLLo!how \n Are yoU? "), + ["HeLLo", "!", "how", "Are", "yoU", "?"]) + + def test_wordpiece_tokenizer(self): + vocab_tokens = [ + "[UNK]", "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", + "##ing" + ] + + vocab = {} + for (i, token) in enumerate(vocab_tokens): + vocab[token] = i + tokenizer = tokenization.WordpieceTokenizer(vocab=vocab) + + self.assertAllEqual(tokenizer.tokenize(""), []) + + self.assertAllEqual( + tokenizer.tokenize("unwanted running"), + ["un", "##want", "##ed", "runn", "##ing"]) + + self.assertAllEqual( + tokenizer.tokenize("unwantedX running"), ["[UNK]", "runn", "##ing"]) + + def test_convert_tokens_to_ids(self): + vocab_tokens = [ + "[UNK]", "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", + "##ing" + ] + + vocab = {} + for (i, token) in enumerate(vocab_tokens): + vocab[token] = i + + self.assertAllEqual( + tokenization.convert_tokens_to_ids( + vocab, ["un", "##want", "##ed", "runn", "##ing"]), [7, 4, 5, 8, 9]) + + def test_is_whitespace(self): + self.assertTrue(tokenization._is_whitespace(u" ")) + self.assertTrue(tokenization._is_whitespace(u"\t")) + self.assertTrue(tokenization._is_whitespace(u"\r")) + self.assertTrue(tokenization._is_whitespace(u"\n")) + self.assertTrue(tokenization._is_whitespace(u"\u00A0")) + + self.assertFalse(tokenization._is_whitespace(u"A")) + self.assertFalse(tokenization._is_whitespace(u"-")) + + def test_is_control(self): + self.assertTrue(tokenization._is_control(u"\u0005")) + + self.assertFalse(tokenization._is_control(u"A")) + self.assertFalse(tokenization._is_control(u" ")) + self.assertFalse(tokenization._is_control(u"\t")) + self.assertFalse(tokenization._is_control(u"\r")) + self.assertFalse(tokenization._is_control(u"\U0001F4A9")) + + def test_is_punctuation(self): + self.assertTrue(tokenization._is_punctuation(u"-")) + self.assertTrue(tokenization._is_punctuation(u"$")) + self.assertTrue(tokenization._is_punctuation(u"`")) + 
self.assertTrue(tokenization._is_punctuation(u".")) + + self.assertFalse(tokenization._is_punctuation(u"A")) + self.assertFalse(tokenization._is_punctuation(u" ")) + + +if __name__ == "__main__": + tf.test.main() diff --git a/experiment.sh b/experiment.sh new file mode 100644 index 0000000..75a94bd --- /dev/null +++ b/experiment.sh @@ -0,0 +1,285 @@ +#!/usr/bin/env bash + + +# ------------------- SET BY THE USER ----------------------- # + +# indicate which folders contain the data for the training: +export BERT_BASE_DIR=/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12 +export SQUAD_DIR=/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/data +export IMAGE_NET_DIR=/media/demouser/0434B71B34B70F24/image_net + +# choose which evaluation tools or methods you want to include in the experiments +calc_list=( +'code_carbon:online' \ +'carbon_tracker:measure' \ +'carbon_tracker:predict' \ +'eco2ai' \ +'green_algorithms:default' \ +'green_algorithms:automated_parallel' \ +'tapo' +) +calc_list=( +'green_algorithms:default' +) + +# choose if you want to use a GPU for the training: +accelerator_list=('True') + +# choose the number of iterations you want to do during the experiments: +nbIter=2 + +# here change the exp name (image_net, SQUAD-v1-1, SQUAD-extracted, idle, mnist, etc): +exp_list=('image_net') + +# if dev_test is "True", less epochs will be used for the training (for tests): +dev_test="True" + +# ------------------- SET BY THE USER ----------------------- # + + + + +# ----------------- UNCOMMENT FOR EXPERIMENTS -------------------- # + +sudo chmod o+r /sys/class/powercap/intel-rapl\:0/energy_uj + +sudo chmod a+r /dev/cpu/*/msr +sudo setcap cap_sys_rawio=ep /usr/sbin/rdmsr + +# Needed ? +# export OUTPUT_ESTIMATOR=./output/calculator_output + +# ----------------- UNCOMMENT FOR EXPERIMENTS -------------------- # + + + +# ------------------- USEFULL VAR AND FCTS ----------------------- # + +declare -A scripts=( ['mnist']='exp-1-MNIST/MNIST-ALL' \ +['cifar10']='exp-2-CIFAR10/CIFAR10-ALL' \ +['CUB_200_2011']='exp-3-resnet18/classification_pytorch_vision' \ +['image_net']='exp-3-resnet18/classification_pytorch_vision' \ +['SQUAD-extracted']='exp-4-bert-squad/run_squad' \ +['SQUAD-v1-1']='exp-4-bert-squad/run_squad' \ +['idle']='exp-0-idle/idle' ) + +declare -A acc_list=( [False]='cpu' [True]='cuda') + +declare -A squad_data=( ["training"]="train" \ +["inference"]="dev" \ +["SQUAD-extracted"]="extracted" \ +["SQUAD-v1-1"]="v1.1" ) + +python initialisation_experiment.py + +path_logs_and_results=$(cat logs_and_results/current_logs_and_results_folder.txt) +# x-terminal-emulator -e "python TAPO-VAR-initialisation.py" # if we want each ml task to open in a different terminal +echo $path_logs_and_results + + +one_monitoring_step () { + # Function body + + echo $calc_and_mode + + calc=$(echo $calc_and_mode | cut -d':' -f1) + mode=$(echo $calc_and_mode | cut -d':' -f2) + + script=${scripts[$exp]} + acc=${acc_list[$useAcc]} + echo $exp $folder $i $calc $mode $acc + file="${script}.py" + + + opt1="--use_accelerator=${useAcc}" + opt2="--save_model=False" + opt3="--calculator=${calc}" + opt4="--calculator_mode=${mode}" + opt5="--ml_phase=${ml}" + opt6="--dev_test=${dev_test}" + opt7="--name_exp=${exp}" + opt="${opt1} ${opt2} ${opt3} ${opt4} ${opt5} ${opt6} ${opt7}" + + if [[ $exp == 'mnist' ]]; then + if [[ $dev_test == 'True' ]]; then + opt7="--nb_batch_inferences=10" + opt8="--epochs=2" + opt="${opt} ${opt7} ${opt8}" + fi + if [[ $ml == 'training' 
]]; then + mkdir "${path_logs_and_results}/${i}/${exp}_model" + opt9="--output_dir=${path_logs_and_results}/${i}/${exp}_model" + opt="${opt} ${opt9}" + fi + fi + + if [[ $exp == 'cifar10' ]]; then + if [[ $dev_test == 'True' ]]; then + opt7="--nb_batch_inferences=50" + opt8="--epochs=2" + opt="${opt} ${opt7} ${opt8}" + fi + if [[ $ml == 'training' ]]; then + mkdir "${path_logs_and_results}/${i}/${exp}_model" + opt9="--output_dir=${path_logs_and_results}/${i}/${exp}_model" + opt="${opt} ${opt9}" + fi + fi + + if [[ $exp == 'CUB_200_2011' || $exp == 'image_net' ]]; then + if [[ $dev_test == 'True' ]]; then + opt7="--nb_batch_inferences=5" + opt8="--epochs=1" + opt="${opt} ${opt7} ${opt8}" + else + opt7="--nb_batch_inferences=5" + opt8="--epochs=2" + opt="${opt} ${opt7} ${opt8}" + fi + if [[ $exp == 'image_net' ]]; then + opt9="--data-path=${IMAGE_NET_DIR}" + else + opt9="--data-path=data/${exp}" + fi + if [[ $ml == 'training' ]]; then + mkdir "${path_logs_and_results}/${i}/${exp}_model" + opt10="--output-dir=${path_logs_and_results}/${i}/${exp}_model" + fi + opt="${opt} ${opt9} ${opt10}" + + o1="--batch-size=16 --workers=8" + opt="${opt} ${o1} ${o2} ${o3} ${o4} ${o5} ${o6} ${o7}" + fi + + if [[ $exp == "SQUAD-extracted" || $exp == "SQUAD-v1-1" ]]; then + file_name="$SQUAD_DIR/train-${squad_data[$exp]}" + opt7="--train_file=${file_name}.json" + file_name="$SQUAD_DIR/dev-${squad_data[$exp]}" + opt8="--predict_file=${file_name}.json" + + if [[ $ml == "training" ]]; then + opt9="--do_predict=False --do_train=True" + mkdir "${path_logs_and_results}/${i}/${exp}_model" + opt10="--output_dir=${path_logs_and_results}/${i}/${exp}_model" + else + opt9="--do_predict=True --do_train=False" + mkdir "${path_logs_and_results}/{$i}/${exp}_inference" + opt10="--output_dir=${path_logs_and_results}/${i}/${exp}_inference" + fi + opt="${opt} ${opt7} ${opt8} ${opt9} ${opt10}" + if [[ $dev_test == True ]]; then + opt11="--num_train_epochs=1.0" + else + opt11="--num_train_epochs=2.0" + fi + opt="${opt} ${opt11} ${bert_opt}" + + o1="--train_batch_size=8" + o2="--learning_rate=3e-5" + o3="--vocab_file=$BERT_BASE_DIR/vocab.txt" + o4="--bert_config_file=$BERT_BASE_DIR/bert_config.json" + o5="--init_checkpoint=${BERT_BASE_DIR}/bert_model.ckpt" + o6="--doc_stride=128" + o7="--max_seq_length=128" + opt="${opt} ${o1} ${o2} ${o3} ${o4} ${o5} ${o6} ${o7}" + fi + opt="${opt} ${opt_path}" + } + +python experiment_completed.py --done=False + +ml="training" + +# ------------------- USEFULL VAR AND FCTS ----------------------- # + + + + + + + +# ------------------- EXPERIMENT LOOP ----------------------- # + +if [[ $1 != "" ]]; then + exp_list=($1) + calc_list=('no_calculator') + accelerator_list=('True') + nbIter=1 + ml="training" + dev_test="True" +fi + +idle_time=600 + +for i in `seq $nbIter`; do + + mkdir ${path_logs_and_results}/${i} + opt_path="--path_logs_and_results=${path_logs_and_results}/${i}" + mkdir ${path_logs_and_results}/${i}/carbon_tracker_measure_logs + mkdir ${path_logs_and_results}/${i}/carbon_tracker_predict_logs + mkdir ${path_logs_and_results}/${i}/tapo_logs + mkdir ${path_logs_and_results}/${i}/term_logs + mkdir ${path_logs_and_results}/${i}/util_logs + + + for exp in ${exp_list[@]}; do + + for useAcc in ${accelerator_list[@]}; do + shuf_calc_list=$(shuf -e "${calc_list[@]}") + + cpt=0 + for calc_and_mode in ${shuf_calc_list[@]}; do + + one_monitoring_step + + if [[ $exp == 'idle' ]]; then + cmd_idle="--idle_time=${idle_time}" + fi + + cmd="python ${file} ${opt} ${cmd_idle}" + echo $cmd + 
log_file="${path_logs_and_results}/${i}/term_logs/${i}-${cpt}-${exp}-${acc}-${calc}-${mode}" + + echo $calc + if [[ $calc == 'tapo' ]]; then + # x-terminal-emulator -e "./parallel-TAPO.sh '${cmd}' '${opt_path}' 2>&1 | tee ${log_file}.txt" # if we want each ml task to open in a different terminal + ./parallel-TAPO.sh "${cmd}" "${opt_path}" 2>&1 | tee ${log_file}.txt + elif [[ $calc == 'green_algorithms' && $mode == 'automated_parallel' ]]; then + # x-terminal-emulator -e "./parallel-GA.sh '${cmd}' '${opt_path}' 2>&1 | tee ${log_file}.txt" # if we want each ml task to open in a different terminal + ./parallel-GA.sh "${cmd}" "${opt_path}" 2>&1 | tee ${log_file}.txt + else + # x-terminal-emulator -e "${cmd} 2>&1 | tee ${log_file}.txt" # if we want each ml task to open in a different terminal + ${cmd} 2>&1 | tee ${log_file}.txt + fi + + cpt=$((cpt+1)) + + if [[ $exp == 'idle' ]]; then + sleep 2m + else + if [[ $dev_test == 'True' ]]; then + sleep 10s + else + sleep 10m + fi + fi + done + + if [[ $i == 1 && $exp != 'idle' ]]; then + calc_and_mode='flops' + one_monitoring_step + cmd="python ${file} ${opt}" + echo $cmd + log_file="${path_logs_and_results}/${i}/term_logs/${i}-${cpt}-${exp}-${acc}-flops-${mode}" + echo $calc + # x-terminal-emulator -e "${cmd} 2>&1 | tee ${log_file}.txt" # if we want each ml task to open in a different terminal + ${cmd} 2>&1 | tee ${log_file}.txt + fi + + done + done +done + +python experiment_completed.py --done=True + +# ------------------- EXPERIMENT LOOP ----------------------- # \ No newline at end of file diff --git a/experiment_completed.py b/experiment_completed.py new file mode 100644 index 0000000..23ccabf --- /dev/null +++ b/experiment_completed.py @@ -0,0 +1,9 @@ +from argparse import ArgumentParser + +parser = ArgumentParser() +parser.add_argument('--done', type=str, default="False", + help='is the exeriment finished') +args = parser.parse_args() + +with open('experiment_completed_VAR.txt', 'w') as file: + file.write(args.done) \ No newline at end of file diff --git a/experiment_completed_VAR.txt b/experiment_completed_VAR.txt new file mode 100644 index 0000000..4791ed5 --- /dev/null +++ b/experiment_completed_VAR.txt @@ -0,0 +1 @@ +True \ No newline at end of file diff --git a/fct_for_ES.py b/fct_for_ES.py new file mode 100644 index 0000000..0300001 --- /dev/null +++ b/fct_for_ES.py @@ -0,0 +1,64 @@ +import json +import os +import pprint +import datetime + +def fct_find_data(folder_traces): + """ find the data corresponing to the latest experiment """ + list_dir = [] + for file in os.listdir(folder_traces): + d = os.path.join(folder_traces, file) + if os.path.isdir(d): + if file.startswith("tmp"): + list_dir.append(file) + + tmp_file = max(list_dir) + + data_file = folder_traces+"/"+tmp_file+"/"+"energy_scope_eprofile_0.txt" + + with open(data_file, 'r') as f: + ES_data = json.load(f) + + return(ES_data) + + + +def fct_time_energy(ES_data, tag): + """ returns the duration in seconds of the tagged task """ + + ES_tag_data = ES_data['data']['data']['tags'][tag] + + + date_str2tuple = lambda st : tuple([int(x) for x in st[:10].split('/')])+tuple([int(x) for x in st[11:19].split(':')]) + stop_tuple= date_str2tuple(ES_tag_data['stop']) + # stop_sec_float = float('0.'+ES_tag_data['stop'][20:]) + + start_tuple= date_str2tuple(ES_tag_data['start']) + # start_sec_float = float('0.'+ES_tag_data['start'][20:]) + + stop_datetime = datetime.datetime(*stop_tuple) + start_datetime = datetime.datetime(*start_tuple) + + meas_time = (stop_datetime - 
start_datetime).total_seconds() + + meas_energy = float(ES_tag_data['joule(J)'])/3.6*10**(-6) # kWh + + return(meas_time, meas_energy) + + + + +# # print(datetime.strftime(ES_time_data['stop'] )) + +# # meas_time = ES_time_data['stop'] - ES_time_data['start'] + +# # meas_epochs = epochs +# # meas_time = +# # meas_energy = +# # meas_co2 = + +# # '2023/01/03 17:42:06.414168' + +# with open(folder_traces+'/energy_scope_enode_0_demouser-Alienware-Aurora-R9.txt', 'r') as f: +# text = json.load(f) +# pprint.pprint(text) diff --git a/fct_for_experiments.py b/fct_for_experiments.py new file mode 100644 index 0000000..d2e6530 --- /dev/null +++ b/fct_for_experiments.py @@ -0,0 +1,356 @@ +import sys +import os +_path = '.' +sys.path.append(os.path.join(_path)) +import json +import time + +# --- FOR CALCULATORS +from codecarbon import EmissionsTracker, OfflineEmissionsTracker +from carbontracker.tracker import CarbonTracker +import eco2ai +from fct_for_ga import wait_for_UTIL +from fct_for_tapo import wait_for_TAPO +# ------------------ + + +def new_timestamp(main_folder, iteration, tag): + + file_name = os.path.join(main_folder, 'timestamps.json') + + with open(file_name, 'r') as file: + d = json.load(file) + + if iteration not in d: + d[iteration] = {} + d[iteration][tag] = time.time() + + with open(file_name, 'w') as file: + json.dump(d, file, indent = 4) + + +class ExpParams(): + def __init__(self, args_parser): + + self.name = args_parser.name_exp + self.comp = args_parser.computer + self.name_calc = args_parser.calculator + self.mode_calc = args_parser.calculator_mode if args_parser.calculator_mode != self.name_calc else '' + self.path_logs_and_results = args_parser.path_logs_and_results + + tmp = self.path_logs_and_results + tmp = tmp.split("/") + self.main_folder = "/".join(tmp[:-1]) + self.iteration = tmp[-1] + + # timestamp - calculator iteration test start + tag = self.name_calc + ':' + self.mode_calc + ' test start' + new_timestamp(self.main_folder, self.iteration, tag) + + if self.name[:5] == "SQUAD": + import tensorflow as tf + self.use_accelerator = args_parser.use_accelerator + self.save_model = False + if args_parser.do_predict == True: + self.ml = "inference" + self.epochs = "N/A" + else: # we don't do both + self.ml = "training" + self.epochs = args_parser.num_train_epochs + self.dev_test = None + self.train_batch_size = args_parser.train_batch_size + self.test_batch_size = args_parser.predict_batch_size + else: + import torch + self.use_accelerator = bool(args_parser.use_accelerator == "True") + self.save_model = bool(args_parser.save_model == "True") + self.ml = args_parser.ml_phase + if self.ml == "inference": + self.epochs = "N/A" + else: + self.epochs = args_parser.epochs + self.dev_test = bool(args_parser.dev_test == "True") + if (self.name == 'cifar10') or (self.name == 'mnist'): + self.train_batch_size = args_parser.batch_size + self.test_batch_size = args_parser.test_batch_size + else: + self.train_batch_size = args_parser.batch_size + self.test_batch_size = args_parser.batch_size + + + + print('Experience: ', self.name) + print('Save model: ', self.save_model) + print('Accelerator: ', self.use_accelerator) + print('ML phase: ', self.ml) + print('Calculator: ', self.name_calc) + print('Dev test model: ', self.dev_test) + + self.measure = None + self.online = None + self.automated = None + self.parallel = None + + if self.name_calc == 'carbon_tracker': + self.measure = bool(args_parser.calculator_mode != "predict") + print('Calculator mode: ', args_parser.calculator_mode, ' 
measure=', self.measure) + if self.measure: + self.ct_log_dir = os.path.join(args_parser.path_logs_and_results, "carbon_tracker_measure_logs") + else: + self.ct_log_dir = os.path.join(args_parser.path_logs_and_results, "carbon_tracker_predict_logs") + elif self.name_calc == 'code_carbon': + self.online = bool(args_parser.calculator_mode != "offline") + print('Calculator mode: ', args_parser.calculator_mode, ' online=', self.online) + if self.online: + self.cc_output_file = os.path.join(args_parser.path_logs_and_results, "output_code_carbon_online.csv") + else: + self.cc_output_file = os.path.join(args_parser.path_logs_and_results, "output_code_carbon_offline.csv") + elif self.name_calc == 'green_algorithms': + self.automated = bool(args_parser.calculator_mode != "default") + print('Calculator mode: ', args_parser.calculator_mode, ' automated=', self.automated) + if self.automated: + self.parallel = bool(args_parser.calculator_mode == "automated_parallel") + print('Automated and parallel mode: ', self.parallel) + + self.eco2ai_output_file = os.path.join(args_parser.path_logs_and_results, "output_eco2ai.csv") + + if self.name == "SQUAD-extracted" or self.name == "SQUAD-v1-1": + # print('here') + # print(tf.config.list_physical_devices('GPU')) + # print(self.use_accelerator) + # print(tf.config.list_physical_devices('GPU') != []) + use_cuda = self.use_accelerator and (tf.config.list_physical_devices('GPU') != []) + if use_cuda: + os.environ['CUDA_VISIBLE_DEVICES'] = "0,1" + self.device_name = "cuda" + else: + os.environ['CUDA_VISIBLE_DEVICES'] = "" + self.device_name = "cpu" + else: + use_cuda = self.use_accelerator and torch.cuda.is_available() + use_mps = self.use_accelerator and torch.backends.mps.is_available() + if use_cuda: + self.device = torch.device("cuda") + elif use_mps: + self.device = torch.device("mps") + else: + self.device = torch.device("cpu") + self.device_name = self.device.type + print("Device is: ", self.device) + + if use_cuda: # rtx 2080 super info + TDP = 250 # Watts (=J per sec) + peak_flops = 11.15 # theorical TFLOPS (teraFLOPS, 10**12 FLOPS) + self.perf_per_watt = peak_flops/TDP*10**12 # FLOPs per J + else: # intel core i9-9900 + TDP = 95 # Watts + peak_flops = 460.8 # theorical GFLOPS (gigaFLOPS, 10**9 FLOPS) + self.perf_per_watt = peak_flops/TDP*10**9 + + + +def prepare_calculator(exp): + # Preparing the calculators # + + tracker = None + + if exp.name_calc == 'code_carbon': + output_file = exp.cc_output_file + tracking_mode = 'machine' + # tracking_mode = 'process' + if exp.online == True: + measure_power_secs = 2 + tracker = EmissionsTracker( + output_file = output_file, + measure_power_secs = measure_power_secs, + tracking_mode = tracking_mode) + else: + country_iso_code = 'FRA' + tracker = OfflineEmissionsTracker( + output_file = output_file, + country_iso_code = country_iso_code) + + elif exp.name_calc == 'carbon_tracker': + log_dir = exp.ct_log_dir + # # Delete the previous logs: + # for f in os.listdir(log_dir): + # if f != "readme.txt": + # os.remove(os.path.join(log_dir, f)) + update_interval = 2 # interval in seconds between power usage measurements are taken + monitor_epochs = -1 # number of epochs that we want to monitor (-1 means all epochs) + decimal_precision = 10 # desired decimal precision of reported values. 
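+        # With epochs_before_pred=0, carbontracker only measures; with a positive
+        # value it also predicts the total consumption from the first epoch(s)
+        # (assumption based on how the carbontracker options are used here).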
+ if exp.measure == True: + epochs_before_pred = 0 # number of epochs to use for average used for making the prediction + else: + epochs_before_pred = 1 + if exp.ml == 'training': + # carbon_tracker_epochs = exp.epochs + carbon_tracker_epochs = int(exp.epochs) + elif exp.ml == 'inference': + carbon_tracker_epochs = 1 # fake single epoch + tracker = CarbonTracker(epochs=carbon_tracker_epochs, + update_interval = update_interval, + log_dir = log_dir, + monitor_epochs = monitor_epochs, + epochs_before_pred = epochs_before_pred, + decimal_precision = decimal_precision) + + elif exp.name_calc == 'eco2ai': + output_file = exp.eco2ai_output_file + alpha_2_code="FR" + tracker = eco2ai.Tracker(file_name=output_file, + alpha_2_code=alpha_2_code, + pue=1, + measure_period=2) + + elif exp.name_calc == 'energy_scopium': + # os.system("${ENERGY_SCOPE_SRC_DIR}/energy_scope_record.sh start") + os.system("${ENERGYSCOPIUM_SRC_DIR}/energyscopium_record.sh start") + + elif exp.name_calc == 'green_algorithms': + if exp.automated and exp.parallel: + wait_for_UTIL() + + elif exp.name_calc == 'tapo': + wait_for_TAPO() + + elif exp.name_calc == 'mygmlc' and exp.automated: + tracker = MyGMLC(output_file = exp.mygmlc_output_file, + country='France') + + return tracker + + +def start_calculators(exp, tracker): + + # timestamp - task start + tag = exp.name_calc + ':' + exp.mode_calc + ' task start' + new_timestamp(exp.main_folder, exp.iteration, tag) + + if (exp.name_calc == 'code_carbon') or (exp.name_calc == 'eco2ai'): + tracker.start() + elif (exp.name_calc == 'carbon_tracker') and (exp.ml == 'inference'): + tracker.epoch_start() + elif exp.name_calc == 'energy_scopium': + # os.system("${ENERGY_SCOPE_SRC_DIR}/energy_scope_record.sh tags start tag_" + exp.ml) + os.system("${ENERGYSCOPIUM_SRC_DIR}/energyscopium_record.sh tags start tag_" + exp.ml) + elif exp.name_calc == 'mygmlc' and exp.automated: + tracker.start() + + +def stop_calculators(exp, tracker): + + # timestamp - task stop + tag = exp.name_calc + ':' + exp.mode_calc + ' task stop' + new_timestamp(exp.main_folder, exp.iteration, tag) + + if (exp.name_calc == 'carbon_tracker') and (exp.ml == 'inference'): + tracker.epoch_end() + elif (exp.name_calc == 'code_carbon') or (exp.name_calc == 'eco2ai') or (exp.name_calc == 'carbon_tracker'): + tracker.stop() + elif exp.name_calc == 'energy_scopium': + # os.system("${ENERGY_SCOPE_SRC_DIR}/energy_scope_record.sh tags stop tag_" + exp.ml) + os.system("${ENERGYSCOPIUM_SRC_DIR}/energyscopium_record.sh tags stop tag_" + exp.ml) + os.system("${ENERGYSCOPIUM_SRC_DIR}/energyscopium_record.sh stop") + # os.system("${ENERGYSCOPIUM_SRC_DIR}/energyscopium_record.sh send") + elif exp.name_calc == 'mygmlc' and exp.automated: + tracker.stop() + +def FLOPs_inference_to_training(exp, nb_FLOPs_forward, nb_examples): + + # nb of times we do: forward pass (inference) + loss computation + backward pass + nb_iterations = (nb_examples / exp.train_batch_size) * exp.epochs + + # coeff 3 comes from a blog: + factor = nb_iterations * 3 + nb_FLOPs = nb_FLOPs_forward * factor + + print('Total nb FLOPs of training ', nb_FLOPs) + return(nb_FLOPs) + +def FLOPs_to_energy(exp, nb_FLOPs): + Ec_J = nb_FLOPs/ exp.perf_per_watt + Ec_kWh = Ec_J/(3.6 * 10**6) # (3.6 * 10**6) = nb of J per kWh + print('Energy consumed (kWh): ', Ec_kWh) + return(Ec_kWh) + +def flops_method_pytorch(exp, data_loader, model): + + if exp.name_calc == 'flops': + if exp.name == 'cifar10': + input_channels = 3 + input_x = 32 + input_y = 32 + elif exp.name == 'mnist': + 
input_channels = 1 + input_x = 28 + input_y = 28 + elif (exp.name == 'CUB_200_2011') or (exp.name == 'image_net'): + input_channels = 3 + input_x = 224 + input_y = 224 + + import torch + from thop import profile + nb_examples = len(data_loader)*exp.train_batch_size + + if (exp.ml == "training"): + input = torch.randn(exp.train_batch_size, input_channels, input_x, input_y) + # the output of profile is in MACs (not gigaMACs or other) + MACs, params = profile(model,inputs=(input, )) + nb_FLOPs_inference = MACs * 2 + print('Total nb of FLOPs inference: ', nb_FLOPs_inference) + nb_FLOPs = FLOPs_inference_to_training(exp, nb_FLOPs_inference, nb_examples) + + elif (exp.ml == "inference"): + input = torch.randn(exp.test_batch_size, input_channels, input_x, input_y) + MACs, params = profile(model,inputs=(input, )) + nb_FLOPs = MACs* 2 + print('Total nb of FLOPs inference: ', nb_FLOPs) + + Ec_kWh = FLOPs_to_energy(exp, nb_FLOPs) + return Ec_kWh + +def flops_method_tensorflow(exp, nb_examples, graph_folder): + + if exp.name_calc == 'flops': + import tensorflow as tf + import os + from google.protobuf import text_format + from tensorflow.python.platform import gfile + + def pbtxt_to_graphdef(filename): + print("\n[CONVERT] Converting from .pbtxt to .pb: '{}'\n".format(filename)) + with open(filename, 'r') as f: + graph_def = tf.compat.v1.GraphDef() + file_content = f.read() + text_format.Merge(file_content, graph_def) + tf.import_graph_def(graph_def, name='') + in_dir = os.path.dirname(filename) + out_filename = os.path.splitext(os.path.basename(filename))[0] + ".pb" + tf.compat.v1.train.write_graph(graph_def, in_dir, out_filename, as_text=False) + print("\n[CONVERT] Wrote file to: '{}'\n".format(os.path.join(in_dir, out_filename))) + + file_name = os.path.join(graph_folder, 'graph.pbtxt') + pbtxt_to_graphdef(file_name) + + def load_pb(pb): + with tf.compat.v1.gfile.GFile(pb, "rb") as f: + graph_def = tf.compat.v1.GraphDef() + graph_def.ParseFromString(f.read()) + with tf.Graph().as_default() as graph: + tf.import_graph_def(graph_def, name='') + return graph + + g = load_pb(os.path.join(graph_folder, 'graph.pb')) + + with g.as_default(): + flops = tf.compat.v1.profiler.profile(g, options = tf.compat.v1.profiler.ProfileOptionBuilder.float_operation()) + print('Total nb FLOPs of forward pass', flops.total_float_ops) + + nb_FLOPs = flops.total_float_ops + if exp.ml == "training": + nb_FLOPs = FLOPs_inference_to_training(exp, nb_FLOPs, nb_examples) + + Ec_kWh = FLOPs_to_energy(exp, nb_FLOPs) + return Ec_kWh \ No newline at end of file diff --git a/fct_for_ga.py b/fct_for_ga.py new file mode 100644 index 0000000..049cc5b --- /dev/null +++ b/fct_for_ga.py @@ -0,0 +1,113 @@ +from PyP100 import PyP110 +import base64 +import json +import datetime +from pprint import pprint +import time +# from multiprocessing import Process, Event +from datetime import datetime, timedelta +import os +import numpy as np + + +def find_parallel_records_data(folder_logs): + """ find the data corresponing to the latest experiment """ + list_files_tool = [] + list_files_task = [] + for file in os.listdir(folder_logs): + if file.endswith("tool.txt"): + list_files_tool.append(file) + if file.endswith("task.txt"): + list_files_task.append(file) + + tmp_file_tool = max(list_files_tool) + print(tmp_file_tool) + + tmp_file_task = max(list_files_task) + print(tmp_file_task) + + data_tool_file = os.path.join(folder_logs, tmp_file_tool) + data_task_file = os.path.join(folder_logs, tmp_file_task) + + with open(data_tool_file, 'r') as f: + 
data_tool = json.load(f) + + with open(data_task_file, 'r') as f: + data_task = json.load(f) + + return(data_tool, data_task) + + +def select_data_UTIL(t_start, t_end, data_util): + + cpu_util_list = np.array(data_util['cpu_util']) + gpu_util_list = np.array(data_util['gpu_util']) + ram_util_list = np.array(data_util['ram_util']) + time_list = np.array(data_util['time']) + + idx1 = time_list > t_start + idx2 = time_list < t_end + idx = np.array([idx1[k] and idx2[k] for k in range(len(idx1))]) + + time_list_task = time_list[idx] + cpu_util_list_task = cpu_util_list[idx] + gpu_util_list_task = gpu_util_list[idx] + ram_util_list_task = ram_util_list[idx] + + cpu_util_mean = np.mean(cpu_util_list_task) + gpu_util_mean = np.mean(gpu_util_list_task) + ram_util_mean = np.mean(ram_util_list_task) + + return(cpu_util_mean, gpu_util_mean, ram_util_mean) + + +def wait_for_UTIL(): + with open('UTIL-VAR.json', 'r') as f: + d = json.load(f) + UTIL_RUN = d['UTIL_RUN'] + while UTIL_RUN == False: + with open('UTIL-VAR.json', 'r') as f: + d = json.load(f) + UTIL_RUN = d['UTIL_RUN'] + time.sleep(1) + + +def stop_UTIL(exp, t0, tfinal): + with open('UTIL-VAR.json', 'r') as f: + d = json.load(f) + d['UTIL_RUN']=False + with open('UTIL-VAR.json', 'w') as f: + json.dump(d, f) + + data = {} + data[exp.ml + '_start'] = t0 + data[exp.ml + '_end'] = tfinal + str_current_datetime = datetime.now().strftime("%Y%m%d-%H%M%S") + file_name = os.path.join(exp.path_logs_and_results,'util_logs', str_current_datetime+"_task.txt") + with open(file_name, 'w') as f: + json.dump(data, f, indent = 4, sort_keys=True) + + +def mean_parallel_UTIL(exp): + + folder_traces = os.path.join(exp.path_logs_and_results, 'util_logs') + + UTIL_SAVED = False + while UTIL_SAVED == False: + with open('UTIL-VAR.json', 'r') as f: + d = json.load(f) + UTIL_SAVED = d['UTIL_SAVED'] + time.sleep(1) + print(UTIL_SAVED) + + data_util, data_task = find_parallel_records_data(folder_traces) + print('') + print('Data UTIL: ', data_util) + print('') + print('Data ML Task: ', data_task) + + t0 = data_task[exp.ml+'_start'] + tfinal = data_task[exp.ml+'_end'] + + cpu_util, gpu_util, ram_util = select_data_UTIL(t0, tfinal, data_util) + return(cpu_util, gpu_util, ram_util) \ No newline at end of file diff --git a/fct_for_saving.py b/fct_for_saving.py new file mode 100644 index 0000000..327954d --- /dev/null +++ b/fct_for_saving.py @@ -0,0 +1,421 @@ +import json +import os +import pandas as pd +from datetime import timedelta +import numpy as np +from fct_for_tapo import find_tapo_data, select_data +import time +from carbontracker import parser as CTparser +from fct_for_ES import fct_time_energy, fct_find_data +from ga_automatic_request import do_request + +def _save(file_name, exp, meas_epochs, time, meas_energy, meas_co2): + + name_exp = exp.name + ml = exp.ml + comp = exp.comp + dev = exp.device_name + + + calc_short = {'code_carbon:online':'CC:on', + 'code_carbon:offline':'CC:off', + 'carbon_tracker:measure':'CT:meas', + 'carbon_tracker:predict':'CT:pred', + 'eco2ai':'ECO2AI', + 'energy_scopium':'ES', + 'flops':'FLOPS', + 'green_algorithms:default':'GA:def', + 'green_algorithms:automated':'GA:auto', + 'green_algorithms:automated_parallel':'GA:auto-para', + 'tapo':'TAPO', + 'no_calculator':'NOCALC'} + + key = exp.name_calc + if key == 'code_carbon': + if exp.online == True: + key = key + ':' + 'online' + else: + key = key + ':' + 'offline' + elif key == 'carbon_tracker': + if exp.measure == True: + key = key + ':' + 'measure' + else: + key = key + ':' + 'predict' + 
elif key == 'green_algorithms': + if exp.automated == False: + key = key + ':' + 'default' + else: + if exp.parallel == True: + key = key + ':' + 'automated_parallel' + else: + key = key + ':' + 'automated' + + print('---------------------> key', key) + calc = calc_short[key] + + print('# -------------- #') + print('# --- Saving --- #') + print('# -------------- #') + + print('Experiment is: ', name_exp) + print('ML phase is: ', ml) + print('Computer is: ', comp) + print('Torch device is: ', dev) + print('Calculator is: ', calc) + + with open(file_name, 'r') as f: + measurements_dict = json.load(f) + + my_info = measurements_dict[name_exp][ml][comp][dev][calc] + print('Data before: ', my_info) + + my_info["epochs"].append(meas_epochs) + my_info["time"].append(time) + my_info["energy_consumed"].append(meas_energy) + my_info["co2_emissions"].append(meas_co2) + + print('Data after: ', my_info) + + with open(file_name, 'w') as f: + json.dump(measurements_dict, f, indent = 4, sort_keys=True) + +def save_data(exp, meas_epochs, meas_time, calc_time, meas_energy, meas_co2): + + tmp = exp.path_logs_and_results + tmp = tmp.split("/") + print(tmp) + tmp = tmp[:-1] + tmp = "/".join(tmp) + print(tmp) + + file1 = os.path.join(tmp, 'res_calc-time.json') + file2 = os.path.join(tmp, 'res_meas-time.json') + + print('# --- Saving time by hand --- #') + _save(file2, exp, meas_epochs, meas_time, meas_energy, meas_co2) + + print('# --- Saving time by calc --- #') + if calc_time != None: + _save(file1, exp, meas_epochs, calc_time, meas_energy, meas_co2) + + +# ------------------- # +# --- Code Carbon --- # +# ------------------- # + +def save_cc(exp, args_parser, duration): + # Saving the data in the json file + + output_file_name = exp.cc_output_file + file = pd.read_csv(output_file_name) + df=pd.DataFrame(file) + + meas_epochs = exp.epochs + + if exp.ml == 'training': + meas_time = duration + calc_time = df["duration"].iloc[-1] + meas_energy = df["energy_consumed"].iloc[-1] + meas_co2 = df["emissions"].iloc[-1] + elif exp.ml == 'inference': + nb_inferences = args_parser.nb_batch_inferences + meas_time = duration/nb_inferences + calc_time = df["duration"].iloc[-1]/nb_inferences + meas_energy = df["energy_consumed"].iloc[-1]/nb_inferences + meas_co2 = df["emissions"].iloc[-1]/nb_inferences + save_data(exp, meas_epochs, meas_time, calc_time, meas_energy, meas_co2) + + # os.remove(output_file_name) + + +# ---------------------- # +# --- Carbon Tracker --- # +# ---------------------- # + +def save_ct(exp, args_parser, t): + # Saving the data in the json file + + log_dir = exp.ct_log_dir + meas_epochs = exp.epochs + + PUE_2022 = 1.59 + + nb_inferences = args_parser.nb_batch_inferences + if exp.ml == "inference": + factor = nb_inferences + else: + factor = 1 + + # Results from the parser + logs = CTparser.parse_all_logs(log_dir=log_dir) + first_log = logs[0] + print(first_log) + d1 = first_log['actual'] + if exp.measure == True: + print ("{:<22} {:<15}".format('','Measured')) + else: + d1 = first_log['pred'] + print ("{:<22} {:<15}".format('','Predicted')) + + meas_time = t/factor + calc_time = d1["duration (s)"]/factor + meas_energy = d1["energy (kWh)"]/factor/PUE_2022 + meas_co2 = d1["co2eq (g)"]/factor*10**(-3)/PUE_2022 + if meas_co2 != meas_co2: # NaN check: NaN is the only value not equal to itself + meas_co2 = "N/A" + + + save_data(exp, meas_epochs, meas_time, calc_time, meas_energy, meas_co2) + + + for k in d1.keys(): + if k == "equivalents": + if d1[k] != None: + for kk in d1[k].keys(): + s0 = kk + s1 = d1[k][kk] + s1 = float(s1)/factor + print("{:<22} 
{:<15}".format(s0, s1)) + elif k == 'epochs' and exp.ml == "inference": + s0 = k + s1 = "N/A" + print("{:<22} {:<15}".format(s0, s1)) + else: + s0 = k + s1 = d1[k] + s1 = float(s1)/factor + print("{:<22} {:<15}".format(s0, s1)) + +# ------------------- # +# --- Eco2AI --- # +# ------------------- # + +def save_eco2ai(exp, args_parser, duration): + # Saving the data in the json file + + output_file = exp.eco2ai_output_file + file = pd.read_csv(output_file) + df=pd.DataFrame(file) + meas_epochs = exp.epochs + + if exp.ml == 'training': + meas_time = duration + calc_time = df["duration(s)"].iloc[-1] + meas_energy = df["power_consumption(kWh)"].iloc[-1] + meas_co2 = df["CO2_emissions(kg)"].iloc[-1] + elif exp.ml == 'inference': + nb_inferences = args_parser.nb_batch_inferences + meas_time = duration/nb_inferences + calc_time = df["duration(s)"].iloc[-1]/nb_inferences + meas_energy = df["power_consumption(kWh)"].iloc[-1]/nb_inferences + meas_co2 = df["CO2_emissions(kg)"].iloc[-1]/nb_inferences + + save_data(exp, meas_epochs, meas_time, calc_time, meas_energy, meas_co2) + + # os.remove(output_file) + + +# --------------------- # +# --- EnergyScopium --- # +# --------------------- # + +def save_ES(exp, args_parser, duration): + # Saving the data in the json file + + # ES data corresponding to this experiment + # folder_traces = os.environ['ENERGY_SCOPE_TRACES_PATH'] + folder_traces = os.environ['ENERGYSCOPIUM_TRACES_PATH'] + ES_data = fct_find_data(folder_traces) + meas_co2 = "N/A" + meas_epochs = exp.epochs + + if exp.ml == "training": + tag = 'tag_training' + meas_time = duration + calc_time, meas_energy = fct_time_energy(ES_data, tag) + elif exp.ml == 'inference': + nb_inferences = args_parser.nb_batch_inferences + tag = 'tag_inference' + meas_time = duration/nb_inferences + calc_time, meas_energy = fct_time_energy(ES_data, tag) + calc_time = meas_time/nb_inferences + meas_energy = meas_energy/nb_inferences + + save_data(exp, meas_epochs, meas_time, calc_time, meas_energy, meas_co2) + + +# ------------------------ # +# --- Green Algorithms --- # +# ------------------------ # + +def save_ga(exp, args_parser, duration, + util_tracking, cpu_util, gpu_util, ram_util): + # Saving the data in the json file + + calc_time = None + meas_epochs = exp.epochs + + if exp.ml == 'training': + meas_time = duration + factor = 1 + elif exp.ml == 'inference': + nb_inferences = args_parser.nb_batch_inferences + factor = nb_inferences + + td = timedelta(seconds = duration) + td = str(td) + print('td : ', td) + td = td.split(':') + print('split td : ', td) + td_h = td[0] # hours as int + print('hours : ', td_h) + td_m = td[1] # minutes as int + print('minutes : ', td_m) + td_sm = td[2] # seconds as floating numbers + print('seconds : ', td_sm) + td_s = str(float(td_sm)/60) # seconds as fractions of minutes + print('frac of min: ', td_s) + td_m = float(td_m)+ float(td_s) + print('float min : ', td_m) + runTime_hour_input = int(td_h) + runTime_min_input = td_m + + print('Hours :', runTime_hour_input) + print('Minutes :', runTime_min_input) + + GA_inputs = {} + + if util_tracking: + usageCPU_radio = "Yes" + usageCPU_input = np.mean(cpu_util)/100 # CPU utl mean + usageGPU_radio = "Yes" + usageGPU_input = np.mean(gpu_util) # GPU utl mean + usageRAM_input = np.mean(ram_util) # RAM utl mean + + print('cpu usage : ', usageCPU_input) + print('gpu usage : ', usageGPU_input) + print('ram usage : ', usageRAM_input) + + GA_inputs['runTime_hour_input']=runTime_hour_input + GA_inputs['runTime_min_input']=runTime_min_input + 
GA_inputs['usageCPU_radio']=usageCPU_radio + GA_inputs['usageCPU_input']=usageCPU_input + GA_inputs['usageGPU_radio']=usageGPU_radio + GA_inputs['usageGPU_input']=usageGPU_input + GA_inputs['usageRAM_input']=usageRAM_input + + try: + output_GA = do_request(runTime_hour_input = runTime_hour_input, + runTime_min_input = runTime_min_input, + usageCPU_radio = usageCPU_radio, + usageCPU_input = usageCPU_input, + usageGPU_radio = usageGPU_radio, + usageGPU_input = usageGPU_input, + memory_input = usageRAM_input) + except Exception as e: + print('Communication with GA failed: ', e) + file_name_ga = os.path.join(exp.path_logs_and_results, 'GA_inputs.json') + with open(file_name_ga, 'w') as f: + json.dump(GA_inputs, f, indent = 4) + return # the GA request failed, so output_GA is undefined: keep the saved inputs and skip saving results + else: + GA_inputs['runTime_hour_input']=runTime_hour_input + GA_inputs['runTime_min_input']=runTime_min_input + + try: + output_GA = do_request(runTime_hour_input = runTime_hour_input, + runTime_min_input = runTime_min_input) + except Exception as e: + print('Communication with GA failed: ', e) + file_name_ga = os.path.join(exp.path_logs_and_results, 'GA_inputs.json') + with open(file_name_ga, 'w') as f: + json.dump(GA_inputs, f, indent = 4) + return # same as above: skip saving results when the GA request failed + + meas_time = duration/factor + meas_energy = output_GA["energy_needed"]/factor + meas_co2 = output_GA["carbonEmissions"]*10**(-3)/factor + save_data(exp, meas_epochs, meas_time, calc_time, meas_energy, meas_co2) + + +# ------------------- # +# --- TAPO --- # +# ------------------- # + +def save_tapo(exp, args_parser): + # Saving the data in the json file + + folder_traces = os.path.join(exp.path_logs_and_results,'tapo_logs') + + with open('TAPO-VAR.json', 'r') as f: + d = json.load(f) + TAPOSAVED = d['TAPOSAVED'] + print(TAPOSAVED) + while TAPOSAVED == False: + with open('TAPO-VAR.json', 'r') as f: + d = json.load(f) + TAPOSAVED = d['TAPOSAVED'] + time.sleep(1) + print(TAPOSAVED) + + data_tapo, data_task = find_tapo_data(folder_traces) + print('') + print('Data TAPO: ', data_tapo) + print('') + print('Data ML Task: ', data_task) + + t0 = data_task[exp.ml+'_start'] + tf = data_task[exp.ml+'_end'] + duration = tf - t0 + Ec_kWh = select_data(t0, tf, data_tapo) + print('Energy consumed: ', Ec_kWh) + calc_time = None + meas_co2 = "N/A" + meas_epochs = exp.epochs + + if exp.ml == "training": + meas_time = duration + meas_energy = Ec_kWh + elif exp.ml == "inference": + # nb_inferences = args.nb_batch_inferences*test_batch_size + nb_inferences = args_parser.nb_batch_inferences + meas_time = duration/nb_inferences + meas_energy = Ec_kWh/nb_inferences + + save_data(exp, meas_epochs, meas_time, calc_time, meas_energy, meas_co2) + + +# ------------------- # +# --- FLOPs --- # +# ------------------- # + +def save_FLOPS(exp, args_parser, Ec_kWh): + # Saving the data in the json file + + meas_time = "N/A" + calc_time = None + meas_co2 = "N/A" + meas_energy = Ec_kWh + meas_epochs = exp.epochs + + save_data(exp, meas_epochs, meas_time, calc_time, meas_energy, meas_co2) + + +# --------------------- # +# --- No calculator --- # +# --------------------- # + +def save_nocalc(exp, args_parser, duration): + # Saving the data in the json file + + meas_energy = "N/A" + meas_co2 = "N/A" + calc_time = None + meas_epochs = exp.epochs + + if exp.ml == "training": + meas_time = duration + elif exp.ml == "inference": + # nb_inferences = args.nb_batch_inferences*test_batch_size + nb_inferences = args_parser.nb_batch_inferences + meas_time = duration/nb_inferences + + save_data(exp, meas_epochs, meas_time, calc_time, meas_energy, meas_co2) \ No newline at end of 
file diff --git a/fct_for_tapo.py b/fct_for_tapo.py new file mode 100644 index 0000000..bedd599 --- /dev/null +++ b/fct_for_tapo.py @@ -0,0 +1,75 @@ +from PyP100 import PyP110 +import base64 +import json +import datetime +from pprint import pprint +from time import time, sleep +# from multiprocessing import Process, Event +from datetime import datetime, timedelta +import os +import numpy as np + +def find_tapo_data(folder_tapo_logs): + """ find the data corresponding to the latest experiment """ + list_files_tapo = [] + list_files_task = [] + for file in os.listdir(folder_tapo_logs): + if file.endswith("tapo.txt"): + list_files_tapo.append(file) + if file.endswith("task.txt"): + list_files_task.append(file) + + tmp_file_tapo = max(list_files_tapo) + print(tmp_file_tapo) + + tmp_file_task = max(list_files_task) + print(tmp_file_task) + + data_tapo_file = os.path.join(folder_tapo_logs, tmp_file_tapo) + data_task_file = os.path.join(folder_tapo_logs, tmp_file_task) + + with open(data_tapo_file, 'r') as f: + data_tapo = json.load(f) + + with open(data_task_file, 'r') as f: + data_task = json.load(f) + + return(data_tapo, data_task) + + +def select_data(t_start, t_end, data_tapo): + time_list = np.array(data_tapo['time']) + power_list = np.array(data_tapo['power']) + idx1 = time_list > t_start + idx2 = time_list < t_end + idx = np.array([idx1[k] and idx2[k] for k in range(len(idx1))]) + time_list_training = time_list[idx] + power_list_training = power_list[idx] + Ec_J = np.trapz(power_list_training, x=time_list_training) # integrate power (W) over time (s) -> energy (J) + Ec_kWh = Ec_J/(3.6*10**6) + return(Ec_kWh) + +def wait_for_TAPO(): + with open('TAPO-VAR.json', 'r') as f: + d = json.load(f) + TAPORUN = d['TAPORUN'] + while TAPORUN == False: + with open('TAPO-VAR.json', 'r') as f: + d = json.load(f) + TAPORUN = d['TAPORUN'] + sleep(1) + +def stop_TAPO(exp, t0, tfinal): + with open('TAPO-VAR.json', 'r') as f: + d = json.load(f) + d['TAPORUN']=False + with open('TAPO-VAR.json', 'w') as f: + json.dump(d, f) + + data = {} + data[exp.ml + '_start'] = t0 + data[exp.ml + '_end'] = tfinal + str_current_datetime = datetime.now().strftime("%Y%m%d-%H%M%S") + file_name = os.path.join(exp.path_logs_and_results,'tapo_logs', str_current_datetime+"_task.txt") + with open(file_name, 'w') as f: + json.dump(data, f, indent = 4, sort_keys=True) \ No newline at end of file diff --git a/ga_automatic_request.py b/ga_automatic_request.py new file mode 100644 index 0000000..0271fa5 --- /dev/null +++ b/ga_automatic_request.py @@ -0,0 +1,65 @@ +import requests +import json +import sys +import os +from pprint import pprint +_path = '.' 
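+# Note added for clarity: do_request() below replays a Dash callback request of the
+# Green Algorithms web calculator (a POST to its /_dash-update-component endpoint).
+# The hard-coded data['inputs'][i] indices are assumed to follow the field order of
+# the bundled GA_request_data.json; if that file is regenerated, re-check the indices.
+# Hypothetical usage sketch (not part of the original script):
+#   result = do_request(runTime_hour_input=1, runTime_min_input=30.0)
+#   print(result["energy_needed"], result["carbonEmissions"])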
+sys.path.append(os.path.join(_path)) + +def do_request( + runTime_hour_input: int = 12, + runTime_min_input: float = 0.0, + coreType_dropdown: str = 'Both', + CPUmodel_dropdown: str = 'other', + numberCPUs_input: int = 8, + tdpCPU_input: int = 95/8, + GPUmodel_dropdown: str = 'other', + numberGPUs_input: int = 1, + tdpGPU_input: int = 250, + memory_input: int = 64, + location_country_dropdown: str = 'France', + usageCPU_radio: str = "No", + usageCPU_input: float = 1.0, + usageGPU_radio: str = "No", + usageGPU_input: float = 1.0, + platform_type: str = "personalComputer", +) -> dict: + + # Load default json file + f = open(os.path.join(_path, 'GA_request_data.json')) + data = json.load(f) + + # Modify values if needed + data['inputs'][1]['value'] = coreType_dropdown + data['inputs'][2]['value'] = numberCPUs_input + data['inputs'][3]['value'] = CPUmodel_dropdown + data['inputs'][5]['value'] = tdpCPU_input + data['inputs'][6]['value'] = numberGPUs_input + data['inputs'][7]['value'] = GPUmodel_dropdown + data['inputs'][9]['value'] = tdpGPU_input + data['inputs'][10]['value'] = memory_input + data['inputs'][11]['value'] = runTime_hour_input + data['inputs'][12]['value'] = runTime_min_input + data['inputs'][14]['value'] = location_country_dropdown + data['inputs'][20]['value'] = usageCPU_radio + data['inputs'][21]['value'] = usageCPU_input + data['inputs'][22]['value'] = usageGPU_radio + data['inputs'][23]['value'] = usageGPU_input + data['inputs'][29]['value'] = platform_type + + # Perform request + response = requests.post( + 'http://calculator.green-algorithms.org/_dash-update-component', + json=data, + verify=False, + ) + + # Prepare out in json format + response_json = response.json() + + output_json = { + "energy_needed": response_json["response"]["aggregate_data"]["data"]["energy_needed"], + "carbonEmissions": response_json["response"]["aggregate_data"]["data"]["carbonEmissions"] + } + + return output_json \ No newline at end of file diff --git a/initialisation_experiment.py b/initialisation_experiment.py new file mode 100644 index 0000000..d0ec91f --- /dev/null +++ b/initialisation_experiment.py @@ -0,0 +1,46 @@ +import os +from datetime import datetime +from dictionary_list_initialisation import dictionary_list_initialisation +from dictionary_TAPO_VAR_initialisation import dictionary_TAPO_VAR_initialisation +from dictionary_UTIL_VAR_initialisation import dictionary_UTIL_VAR_initialisation +import json + + +# Parent Directory path +parent_dir = "/home/demouser/Documents/Demos/energycalculatorsevaluation/logs_and_results" + +# Directory +str_current_datetime = datetime.now().strftime("%Y%m%d-%H%M%S") +directory = str_current_datetime + "_logs_and_results" + +# Path +path = os.path.join(parent_dir, directory) + +# Create the directory +os.mkdir(path) +print("Directory '% s' created" % directory) + + + +# save directory name txt file +file_name = os.path.join(parent_dir, 'current_logs_and_results_folder.txt') +with open(file_name, 'w') as f: + f.write(path) + +# file_name = os.path.join(parent_dir, 'current_logs_and_results_folder.txt') +# with open(file_name, 'r') as f: +# print(f.read()) + +# os.mkdir(os.path.join(path, 'carbon_tracker_logs')) +# os.mkdir(os.path.join(path, 'tapo_logs')) +# os.mkdir(os.path.join(path, 'term_logs')) +# os.mkdir(os.path.join(path, 'util_logs')) + +dictionary_list_initialisation(path) +dictionary_TAPO_VAR_initialisation() +dictionary_UTIL_VAR_initialisation() + + +file = os.path.join(path, 'timestamps.json') +with open(file, 'w') as f: + json.dump({}, f, 
indent = 4) diff --git a/logs_and_results/README.txt b/logs_and_results/README.txt new file mode 100644 index 0000000..5542ea8 --- /dev/null +++ b/logs_and_results/README.txt @@ -0,0 +1 @@ +To save raw and modified data of experiments. diff --git a/models/bert_saved_model/graph.pb b/models/bert_saved_model/graph.pb new file mode 100644 index 0000000..139d06a Binary files /dev/null and b/models/bert_saved_model/graph.pb differ diff --git a/models/bert_saved_model/graph.pbtxt b/models/bert_saved_model/graph.pbtxt new file mode 100644 index 0000000..b6a28a3 --- /dev/null +++ b/models/bert_saved_model/graph.pbtxt @@ -0,0 +1,550028 @@ +node { + name: "global_step/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@global_step" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: 0 + } + } + } +} +node { + name: "global_step" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@global_step" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "shape" + value { + shape { + } + } + } + attr { + key: "shared_name" + value { + s: "global_step" + } + } +} +node { + name: "global_step/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "global_step" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_step/Assign" + op: "AssignVariableOp" + input: "global_step" + input: "global_step/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "global_step/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "global_step" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } +} +node { + name: "global_step/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "global_step" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_step/cond" + op: "If" + input: "global_step/VarIsInitializedOp" + input: "global_step" + input: "global_step/Initializer/zeros" + attr { + key: "Tcond" + value { + type: DT_BOOL + } + } + attr { + key: "Tin" + value { + list { + type: DT_RESOURCE + type: DT_INT64 + } + } + } + attr { + key: "Tout" + value { + list { + type: DT_INT64 + } + } + } + attr { + key: "_lower_using_switch_merge" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "_read_only_resource_inputs" + value { + list { + i: 1 + } + } + } + attr { + key: "else_branch" + value { + func { + name: "global_step_cond_false_7" + } + } + } + attr { + key: "output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "then_branch" + value { + func { + name: "global_step_cond_true_6" + } + } + } +} +node { + name: "global_step/cond/Identity" + op: "Identity" + input: "global_step/cond" + attr { + key: "T" + value { + type: DT_INT64 + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + } + } + } + } +} +node { + name: "global_step/add/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: 0 + } + } + } +} +node { + name: "global_step/add" + op: "AddV2" + input: "global_step/cond/Identity" + input: "global_step/add/y" + attr { + key: "T" + value { + type: DT_INT64 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "Const" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "models/bert_saved_model/train.tf_record" + } + } + } +} +node { + name: "flat_filenames/shape" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: -1 + } + } + } +} +node { + name: "flat_filenames" + op: "Reshape" + input: "Const" + input: "flat_filenames/shape" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } +} +node { + name: "TensorSliceDataset" + op: "TensorSliceDataset" + input: "flat_filenames" + device: "/device:CPU:0" + attr { + key: "Toutput_types" + value { + list { + type: DT_STRING + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "is_files" + value { + b: true + } + } + attr { + key: "metadata" + value { + s: "" + } + } + attr { + key: "output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "replicate_on_split" + value { + b: false + } + } + experimental_type { + type_id: TFT_PRODUCT + args { + type_id: TFT_DATASET + args { + type_id: TFT_PRODUCT + args { + type_id: TFT_TENSOR + args { + type_id: TFT_STRING + } + } + } + } + } +} +node { + name: "FlatMapDataset" + op: "FlatMapDataset" + input: "TensorSliceDataset" + device: "/device:CPU:0" + attr { + key: "Targuments" + value { + list { + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "f" + value { + func { + name: "__inference_Dataset_flat_map_read_one_file_30" + attr { + key: "_tf_data_function" + value { + b: true + } + } + } + } + } + attr { + key: "metadata" + value { + s: "" + } + } + attr { + key: "output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "output_types" + value { + list { + type: DT_STRING + } + } + } + experimental_type { + type_id: TFT_PRODUCT + args { + type_id: TFT_DATASET + args { + type_id: TFT_PRODUCT + args { + type_id: TFT_TENSOR + args { + type_id: TFT_STRING + } + } + } + } + } +} +node { + name: "count" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: -1 + } + } + } +} +node { + name: "RepeatDataset" + op: 
"RepeatDataset" + input: "FlatMapDataset" + input: "count" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "metadata" + value { + s: "" + } + } + attr { + key: "output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "output_types" + value { + list { + type: DT_STRING + } + } + } + experimental_type { + type_id: TFT_PRODUCT + args { + type_id: TFT_DATASET + args { + type_id: TFT_PRODUCT + args { + type_id: TFT_TENSOR + args { + type_id: TFT_STRING + } + } + } + } + } +} +node { + name: "buffer_size" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: 100 + } + } + } +} +node { + name: "seed" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: 0 + } + } + } +} +node { + name: "seed2" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: 0 + } + } + } +} +node { + name: "ShuffleDataset" + op: "ShuffleDataset" + input: "RepeatDataset" + input: "buffer_size" + input: "seed" + input: "seed2" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "metadata" + value { + s: "" + } + } + attr { + key: "output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "output_types" + value { + list { + type: DT_STRING + } + } + } + attr { + key: "reshuffle_each_iteration" + value { + b: true + } + } + experimental_type { + type_id: TFT_PRODUCT + args { + type_id: TFT_DATASET + args { + type_id: TFT_PRODUCT + args { + type_id: TFT_TENSOR + args { + type_id: TFT_STRING + } + } + } + } + } +} +node { + name: "batch_size" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: 3 + } + } + } +} +node { + name: "num_parallel_calls" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: 3 + } + } + } +} +node { + name: "drop_remainder" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_BOOL + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_BOOL + tensor_shape { + } + bool_val: true + } + } + } +} +node { + name: "MapAndBatchDataset" + op: "MapAndBatchDataset" + input: "ShuffleDataset" + input: "batch_size" + input: "num_parallel_calls" + input: "drop_remainder" + device: "/device:CPU:0" + attr { + key: "Targuments" + value { + list { + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"f" + value { + func { + name: "__inference_tf_data_experimental_map_and_batch_lambda_68" + attr { + key: "_tf_data_function" + value { + b: true + } + } + } + } + } + attr { + key: "metadata" + value { + s: "" + } + } + attr { + key: "output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + } + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "output_types" + value { + list { + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + } + } + } + attr { + key: "preserve_cardinality" + value { + b: true + } + } + experimental_type { + type_id: TFT_PRODUCT + args { + type_id: TFT_DATASET + args { + type_id: TFT_PRODUCT + args { + type_id: TFT_TENSOR + args { + type_id: TFT_INT32 + } + } + args { + type_id: TFT_TENSOR + args { + type_id: TFT_INT32 + } + } + args { + type_id: TFT_TENSOR + args { + type_id: TFT_INT32 + } + } + args { + type_id: TFT_TENSOR + args { + type_id: TFT_INT32 + } + } + args { + type_id: TFT_TENSOR + args { + type_id: TFT_INT32 + } + } + args { + type_id: TFT_TENSOR + args { + type_id: TFT_INT32 + } + } + } + } + } +} +node { + name: "IteratorV2" + op: "IteratorV2" + device: "/device:CPU:0" + attr { + key: "_class" + value { + list { + s: "loc:@MapAndBatchDataset" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + } + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "output_types" + value { + list { + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + } + } + } + attr { + key: "shared_name" + value { + s: "" + } + } +} +node { + name: "MakeIterator" + op: "MakeIterator" + input: "MapAndBatchDataset" + input: "IteratorV2" + device: "/device:CPU:0" + attr { + key: "_class" + value { + list { + s: "loc:@MapAndBatchDataset" + } + } + } + experimental_type { + type_id: TFT_PRODUCT + } +} +node { + name: "IteratorToStringHandle" + op: "IteratorToStringHandle" + input: "IteratorV2" + device: "/device:CPU:0" + attr { + key: "_class" + value { + list { + s: "loc:@MapAndBatchDataset" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "IteratorGetNext" + op: "IteratorGetNext" + input: "IteratorV2" + device: "/device:CPU:0" + attr { + key: "_class" + value { + list { + s: "loc:@MapAndBatchDataset" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + } + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + dim { 
+ size: 384 + } + } + shape { + dim { + size: 3 + } + } + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "output_types" + value { + list { + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + type: DT_INT32 + } + } + } +} +node { + name: "bert/embeddings/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: -1 + } + } + } +} +node { + name: "bert/embeddings/ExpandDims" + op: "ExpandDims" + input: "IteratorGetNext:1" + input: "bert/embeddings/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: ":w\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/embeddings/word_embeddings/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/embeddings/word_embeddings/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/embeddings/word_embeddings/Initializer/truncated_normal/TruncatedNormal" + input: "bert/embeddings/word_embeddings/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/Initializer/truncated_normal" + op: "AddV2" + input: "bert/embeddings/word_embeddings/Initializer/truncated_normal/mul" + input: "bert/embeddings/word_embeddings/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/word_embeddings" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/word_embeddings" + } + } +} +node { + name: "bert/embeddings/word_embeddings/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/word_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/word_embeddings" + input: "bert/embeddings/word_embeddings/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/word_embeddings/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: -1 + } + } + } +} +node { + name: "bert/embeddings/Reshape" + op: "Reshape" + input: "bert/embeddings/ExpandDims" + input: "bert/embeddings/Reshape/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "bert/embeddings/Gather" + op: "ResourceGather" + input: "bert/embeddings/word_embeddings" + input: "bert/embeddings/Reshape" + attr { + key: "Tindices" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "batch_dims" + value { + i: 0 + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_indices" + value { + b: true + } + } +} +node { + name: "bert/embeddings/Identity" + op: "Identity" + input: "bert/embeddings/Gather" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/Reshape_1" + op: "Reshape" + input: "bert/embeddings/Identity" + input: "bert/embeddings/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\002\000\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/TruncatedNormal" + input: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings" + } + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal" + op: "AddV2" + input: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/mul" + input: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/token_type_embeddings" + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/token_type_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings" + input: "bert/embeddings/token_type_embeddings/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: -1 + } + } + } +} +node { + name: "bert/embeddings/Reshape_2" + op: "Reshape" + input: "IteratorGetNext:3" + input: "bert/embeddings/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "bert/embeddings/one_hot/on_value" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/embeddings/one_hot/off_value" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { 
+ name: "bert/embeddings/one_hot/depth" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 2 + } + } + } +} +node { + name: "bert/embeddings/one_hot" + op: "OneHot" + input: "bert/embeddings/Reshape_2" + input: "bert/embeddings/one_hot/depth" + input: "bert/embeddings/one_hot/on_value" + input: "bert/embeddings/one_hot/off_value" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "TI" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 2 + } + } + } + } + } + attr { + key: "axis" + value { + i: -1 + } + } +} +node { + name: "bert/embeddings/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/MatMul" + op: "MatMul" + input: "bert/embeddings/one_hot" + input: "bert/embeddings/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/embeddings/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/Reshape_3" + op: "Reshape" + input: "bert/embeddings/MatMul" + input: "bert/embeddings/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/add" + op: "AddV2" + input: "bert/embeddings/Reshape_1" + input: "bert/embeddings/Reshape_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 384 + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 512 + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/LessEqual" + op: "LessEqual" + input: "bert/embeddings/assert_less_equal/x" + input: "bert/embeddings/assert_less_equal/y" + attr { + key: "T" + 
value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/Rank" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/range/start" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/range/delta" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/range" + op: "Range" + input: "bert/embeddings/assert_less_equal/range/start" + input: "bert/embeddings/assert_less_equal/Rank" + input: "bert/embeddings/assert_less_equal/range/delta" + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/All" + op: "All" + input: "bert/embeddings/assert_less_equal/LessEqual" + input: "bert/embeddings/assert_less_equal/range" + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "bert/embeddings/assert_less_equal/Assert/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "Condition x <= y did not hold element-wise:" + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/Assert/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "x (bert/embeddings/assert_less_equal/x:0) = " + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/Assert/Const_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "y (bert/embeddings/assert_less_equal/y:0) = " + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/Assert/Assert/data_0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "Condition x <= y did not hold element-wise:" + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/Assert/Assert/data_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "x (bert/embeddings/assert_less_equal/x:0) = " + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/Assert/Assert/data_3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "y (bert/embeddings/assert_less_equal/y:0) = " + } + } + } +} +node { + name: "bert/embeddings/assert_less_equal/Assert/Assert" + op: "Assert" + input: "bert/embeddings/assert_less_equal/All" + input: "bert/embeddings/assert_less_equal/Assert/Assert/data_0" + input: "bert/embeddings/assert_less_equal/Assert/Assert/data_1" + input: "bert/embeddings/assert_less_equal/x" + input: "bert/embeddings/assert_less_equal/Assert/Assert/data_3" + input: "bert/embeddings/assert_less_equal/y" + attr { + key: "T" + value { + list { + type: DT_STRING + type: DT_STRING + type: DT_INT32 + type: DT_STRING + type: DT_INT32 + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "summarize" + value { + i: 3 + } + } +} +node { + name: "bert/embeddings/position_embeddings/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\002\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/embeddings/position_embeddings/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/embeddings/position_embeddings/Initializer/truncated_normal/mul" + op: "Mul" + input: 
"bert/embeddings/position_embeddings/Initializer/truncated_normal/TruncatedNormal" + input: "bert/embeddings/position_embeddings/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/Initializer/truncated_normal" + op: "AddV2" + input: "bert/embeddings/position_embeddings/Initializer/truncated_normal/mul" + input: "bert/embeddings/position_embeddings/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/position_embeddings" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/position_embeddings" + } + } +} +node { + name: "bert/embeddings/position_embeddings/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/position_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings" + input: "bert/embeddings/position_embeddings/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/position_embeddings/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/Slice/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings" + input: "^bert/embeddings/assert_less_equal/Assert/Assert" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/Slice/begin" + op: "Const" + input: "^bert/embeddings/assert_less_equal/Assert/Assert" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\000\000\000\000" + } + } + } +} +node { + name: "bert/embeddings/Slice/size" + op: "Const" + input: "^bert/embeddings/assert_less_equal/Assert/Assert" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { 
+ size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\001\000\000\377\377\377\377" + } + } + } +} +node { + name: "bert/embeddings/Slice" + op: "Slice" + input: "bert/embeddings/Slice/ReadVariableOp" + input: "bert/embeddings/Slice/begin" + input: "bert/embeddings/Slice/size" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/Reshape_4/shape" + op: "Const" + input: "^bert/embeddings/assert_less_equal/Assert/Assert" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\001\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/Reshape_4" + op: "Reshape" + input: "bert/embeddings/Slice" + input: "bert/embeddings/Reshape_4/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/add_1" + op: "AddV2" + input: "bert/embeddings/add" + input: "bert/embeddings/Reshape_4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/layer_normalization/gamma" + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/gamma" + input: "bert/embeddings/layer_normalization/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: 
true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/layer_normalization/beta" + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/beta" + input: "bert/embeddings/layer_normalization/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/layer_normalization/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + 
} + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice" + op: "StridedSlice" + input: "bert/embeddings/layer_normalization/Shape" + input: "bert/embeddings/layer_normalization/strided_slice/stack" + input: "bert/embeddings/layer_normalization/strided_slice/stack_1" + input: "bert/embeddings/layer_normalization/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/embeddings/layer_normalization/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/mul" + op: "Mul" + input: "bert/embeddings/layer_normalization/mul/x" + input: "bert/embeddings/layer_normalization/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice_1" + op: "StridedSlice" + input: "bert/embeddings/layer_normalization/Shape" + input: "bert/embeddings/layer_normalization/strided_slice_1/stack" + input: "bert/embeddings/layer_normalization/strided_slice_1/stack_1" + input: "bert/embeddings/layer_normalization/strided_slice_1/stack_2" 
+ attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/embeddings/layer_normalization/mul_1" + op: "Mul" + input: "bert/embeddings/layer_normalization/mul" + input: "bert/embeddings/layer_normalization/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice_2/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice_2/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice_2/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/strided_slice_2" + op: "StridedSlice" + input: "bert/embeddings/layer_normalization/Shape" + input: "bert/embeddings/layer_normalization/strided_slice_2/stack" + input: "bert/embeddings/layer_normalization/strided_slice_2/stack_1" + input: "bert/embeddings/layer_normalization/strided_slice_2/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/embeddings/layer_normalization/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/mul_2" + op: "Mul" + input: "bert/embeddings/layer_normalization/mul_2/x" + input: "bert/embeddings/layer_normalization/strided_slice_2" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/Reshape/shape" + op: "Pack" + input: "bert/embeddings/layer_normalization/Reshape/shape/0" + input: "bert/embeddings/layer_normalization/mul_1" + input: "bert/embeddings/layer_normalization/mul_2" + input: "bert/embeddings/layer_normalization/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/embeddings/layer_normalization/Reshape" + op: "Reshape" + input: "bert/embeddings/add_1" + input: "bert/embeddings/layer_normalization/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/ones/packed" + op: "Pack" + input: "bert/embeddings/layer_normalization/mul_1" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/embeddings/layer_normalization/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/ones" + op: "Fill" + input: "bert/embeddings/layer_normalization/ones/packed" + input: "bert/embeddings/layer_normalization/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/embeddings/layer_normalization/zeros/packed" + op: "Pack" + input: "bert/embeddings/layer_normalization/mul_1" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/embeddings/layer_normalization/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/zeros" + op: "Fill" + input: "bert/embeddings/layer_normalization/zeros/packed" + 
input: "bert/embeddings/layer_normalization/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/embeddings/layer_normalization/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/embeddings/layer_normalization/Reshape" + input: "bert/embeddings/layer_normalization/ones" + input: "bert/embeddings/layer_normalization/zeros" + input: "bert/embeddings/layer_normalization/Const" + input: "bert/embeddings/layer_normalization/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/embeddings/layer_normalization/Reshape_1" + op: "Reshape" + input: "bert/embeddings/layer_normalization/FusedBatchNormV3" + input: "bert/embeddings/layer_normalization/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/mul_3/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/layer_normalization/mul_3" + op: "Mul" + input: "bert/embeddings/layer_normalization/Reshape_1" + input: "bert/embeddings/layer_normalization/mul_3/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + 
name: "bert/embeddings/layer_normalization/add" + op: "AddV2" + input: "bert/embeddings/layer_normalization/mul_3" + input: "bert/embeddings/layer_normalization/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/embeddings/dropout/Mul" + op: "Mul" + input: "bert/embeddings/layer_normalization/add" + input: "bert/embeddings/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/embeddings/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/embeddings/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/embeddings/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/embeddings/dropout/random_uniform/RandomUniform" + input: "bert/embeddings/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/embeddings/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/dropout/SelectV2" + op: "SelectV2" + input: "bert/embeddings/dropout/GreaterEqual" + input: "bert/embeddings/dropout/Mul" + input: "bert/embeddings/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape" + op: "Reshape" + input: "IteratorGetNext:2" + input: "bert/encoder/Reshape/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/Cast" + op: "Cast" + input: "bert/encoder/Reshape" + attr { + key: "DstT" + value { + type: DT_FLOAT + } + } + attr { + key: "SrcT" + value { + type: DT_INT32 + } + } + attr { + key: "Truncate" + value { + b: false + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/ones/shape_as_tensor" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\001\000\000\000" + } + } + } +} +node { + name: "bert/encoder/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/ones" + op: "Fill" + input: "bert/encoder/ones/shape_as_tensor" + input: "bert/encoder/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/mul" + op: "Mul" + input: "bert/encoder/ones" + input: "bert/encoder/Cast" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\377\377\377\377\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_1" + op: "Reshape" + input: "bert/embeddings/dropout/SelectV2" + input: "bert/encoder/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + 
attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + input: "bert/encoder/layer_0/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + input: "bert/encoder/layer_0/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_0/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/Reshape_1" + input: "bert/encoder/layer_0/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_0/attention/self/query/MatMul" + input: "bert/encoder/layer_0/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + 
dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + input: "bert/encoder/layer_0/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} 
+node { + name: "bert/encoder/layer_0/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + input: "bert/encoder/layer_0/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/Reshape_1" + input: "bert/encoder/layer_0/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_0/attention/self/key/MatMul" + input: "bert/encoder/layer_0/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + 
} + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + input: "bert/encoder/layer_0/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + input: "bert/encoder/layer_0/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/Reshape_1" + input: "bert/encoder/layer_0/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_0/attention/self/value/MatMul" + input: "bert/encoder/layer_0/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_0/attention/self/query/BiasAdd" + input: "bert/encoder/layer_0/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_0/attention/self/Reshape" + input: "bert/encoder/layer_0/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor 
{ + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_0/attention/self/key/BiasAdd" + input: "bert/encoder/layer_0/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_0/attention/self/Reshape_1" + input: "bert/encoder/layer_0/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_0/attention/self/transpose" + input: "bert/encoder/layer_0/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_0/attention/self/MatMul" + input: "bert/encoder/layer_0/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_0/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_0/attention/self/sub/x" + input: "bert/encoder/layer_0/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_0/attention/self/sub" + input: "bert/encoder/layer_0/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_0/attention/self/Mul" + input: "bert/encoder/layer_0/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_0/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_0/attention/self/Softmax" + input: "bert/encoder/layer_0/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: 
"\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_0/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_0/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_0/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_0/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_0/attention/self/dropout/Mul" + input: "bert/encoder/layer_0/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_0/attention/self/value/BiasAdd" + input: "bert/encoder/layer_0/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + 
dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_0/attention/self/Reshape_2" + input: "bert/encoder/layer_0/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_0/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_0/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_0/attention/self/MatMul_1" + input: "bert/encoder/layer_0/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_0/attention/self/transpose_3" + input: "bert/encoder/layer_0/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + 
} + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: 
"bert/encoder/layer_0/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + input: "bert/encoder/layer_0/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + input: "bert/encoder/layer_0/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + 
} +} +node { + name: "bert/encoder/layer_0/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_0/attention/self/Reshape_3" + input: "bert/encoder/layer_0/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_0/attention/output/dense/MatMul" + input: "bert/encoder/layer_0/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_0/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_0/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_0/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_0/attention/output/dropout/random_uniform/RandomUniform" 
+ input: "bert/encoder/layer_0/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_0/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_0/attention/output/dropout/Mul" + input: "bert/encoder/layer_0/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_0/attention/output/dropout/SelectV2" + input: "bert/encoder/Reshape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } 
+ } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Shape" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice/stack" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice/stack_1" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul" + op: "Mul" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul/x" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } 
+ } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Shape" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice_1/stack" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice_1/stack_1" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul_1" + op: "Mul" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul_1/x" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape/shape/0" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul_1" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape" + op: "Reshape" + input: "bert/encoder/layer_0/attention/output/add" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape/shape" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/ones/packed" + op: "Pack" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/ones" + op: "Fill" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/ones/packed" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/zeros" + op: "Fill" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/zeros/packed" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/ones" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/zeros" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Const" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + op: "Mul" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_1" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/add" + op: "AddV2" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_0/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + input: "bert/encoder/layer_0/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: 
"bert/encoder/layer_0/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + input: "bert/encoder/layer_0/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/add" + input: "bert/encoder/layer_0/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_0/intermediate/dense/MatMul" + input: "bert/encoder/layer_0/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_0/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_0/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_0/intermediate/dense/mul/x" + input: "bert/encoder/layer_0/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_0/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_0/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_0/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_0/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_0/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_0/intermediate/dense/add_1/x" + input: "bert/encoder/layer_0/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_0/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_0/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_0/intermediate/dense/BiasAdd" + input: 
"bert/encoder/layer_0/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } 
+} +node { + name: "bert/encoder/layer_0/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel" + input: "bert/encoder/layer_0/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias" + input: "bert/encoder/layer_0/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_0/intermediate/dense/mul_3" + input: "bert/encoder/layer_0/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_0/output/dense/MatMul" + input: "bert/encoder/layer_0/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_0/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_0/output/dense/BiasAdd" + input: "bert/encoder/layer_0/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_0/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + 
float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_0/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_0/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_0/output/dropout/GreaterEqual" + input: "bert/encoder/layer_0/output/dropout/Mul" + input: "bert/encoder/layer_0/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/add" + op: "AddV2" + input: "bert/encoder/layer_0/output/dropout/SelectV2" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_0/output/layer_normalization_2/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/layer_normalization_2/beta" + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice/stack_1" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_0/output/layer_normalization_2/Shape" + input: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice/stack" + input: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice/stack_1" + input: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/mul" + op: "Mul" + input: "bert/encoder/layer_0/output/layer_normalization_2/mul/x" + input: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice_1" + op: 
"StridedSlice" + input: "bert/encoder/layer_0/output/layer_normalization_2/Shape" + input: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice_1/stack" + input: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice_1/stack_1" + input: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/mul_1" + op: "Mul" + input: "bert/encoder/layer_0/output/layer_normalization_2/mul_1/x" + input: "bert/encoder/layer_0/output/layer_normalization_2/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_0/output/layer_normalization_2/Reshape/shape/0" + input: "bert/encoder/layer_0/output/layer_normalization_2/mul" + input: "bert/encoder/layer_0/output/layer_normalization_2/mul_1" + input: "bert/encoder/layer_0/output/layer_normalization_2/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/Reshape" + op: "Reshape" + input: "bert/encoder/layer_0/output/add" + input: "bert/encoder/layer_0/output/layer_normalization_2/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/ones/packed" + op: "Pack" + input: "bert/encoder/layer_0/output/layer_normalization_2/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { 
+ key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/ones" + op: "Fill" + input: "bert/encoder/layer_0/output/layer_normalization_2/ones/packed" + input: "bert/encoder/layer_0/output/layer_normalization_2/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_0/output/layer_normalization_2/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/zeros" + op: "Fill" + input: "bert/encoder/layer_0/output/layer_normalization_2/zeros/packed" + input: "bert/encoder/layer_0/output/layer_normalization_2/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_0/output/layer_normalization_2/Reshape" + input: "bert/encoder/layer_0/output/layer_normalization_2/ones" + input: "bert/encoder/layer_0/output/layer_normalization_2/zeros" + input: "bert/encoder/layer_0/output/layer_normalization_2/Const" + input: "bert/encoder/layer_0/output/layer_normalization_2/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } 
+ dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3" + input: "bert/encoder/layer_0/output/layer_normalization_2/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/mul_2" + op: "Mul" + input: "bert/encoder/layer_0/output/layer_normalization_2/Reshape_1" + input: "bert/encoder/layer_0/output/layer_normalization_2/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/add" + op: "AddV2" + input: "bert/encoder/layer_0/output/layer_normalization_2/mul_2" + input: "bert/encoder/layer_0/output/layer_normalization_2/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + 
tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + 
input: "bert/encoder/layer_1/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + input: "bert/encoder/layer_1/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_0/output/layer_normalization_2/add" + input: "bert/encoder/layer_1/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} 
+node { + name: "bert/encoder/layer_1/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_1/attention/self/query/MatMul" + input: "bert/encoder/layer_1/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + input: "bert/encoder/layer_1/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + input: "bert/encoder/layer_1/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_0/output/layer_normalization_2/add" + input: "bert/encoder/layer_1/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_1/attention/self/key/MatMul" + input: "bert/encoder/layer_1/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: 
"bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + input: 
"bert/encoder/layer_1/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + input: "bert/encoder/layer_1/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_0/output/layer_normalization_2/add" + input: "bert/encoder/layer_1/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + 
name: "bert/encoder/layer_1/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_1/attention/self/value/MatMul" + input: "bert/encoder/layer_1/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_1/attention/self/query/BiasAdd" + input: "bert/encoder/layer_1/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_1/attention/self/Reshape" + input: "bert/encoder/layer_1/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_1/attention/self/key/BiasAdd" + input: "bert/encoder/layer_1/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + 
size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_1/attention/self/Reshape_1" + input: "bert/encoder/layer_1/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_1/attention/self/transpose" + input: "bert/encoder/layer_1/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_1/attention/self/MatMul" + input: "bert/encoder/layer_1/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_1/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_1/attention/self/sub/x" + input: 
"bert/encoder/layer_1/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_1/attention/self/sub" + input: "bert/encoder/layer_1/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_1/attention/self/Mul" + input: "bert/encoder/layer_1/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_1/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_1/attention/self/Softmax" + input: "bert/encoder/layer_1/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_1/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + 
} +} +node { + name: "bert/encoder/layer_1/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_1/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_1/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_1/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_1/attention/self/dropout/Mul" + input: "bert/encoder/layer_1/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_1/attention/self/value/BiasAdd" + input: "bert/encoder/layer_1/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_1/attention/self/Reshape_2" + input: "bert/encoder/layer_1/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + 
} + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_1/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_1/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_1/attention/self/MatMul_1" + input: "bert/encoder/layer_1/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_1/attention/self/transpose_3" + input: "bert/encoder/layer_1/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + input: "bert/encoder/layer_1/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + 
} + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + input: "bert/encoder/layer_1/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_1/attention/self/Reshape_3" + input: "bert/encoder/layer_1/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_1/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_1/attention/output/dense/MatMul" + input: "bert/encoder/layer_1/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_1/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_1/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_1/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_1/attention/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_1/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: 
"bert/encoder/layer_1/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_1/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_1/attention/output/dropout/Mul" + input: "bert/encoder/layer_1/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_1/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_0/output/layer_normalization_2/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + 
attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Shape" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice/stack" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice/stack_1" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul" + op: "Mul" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul/x" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Shape" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice_1/stack" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice_1/stack_1" + input: 
"bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul_1" + op: "Mul" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul_1/x" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape/shape/0" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul_1" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape" + op: "Reshape" + input: "bert/encoder/layer_1/attention/output/add" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/ones/packed" + op: "Pack" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { 
+ list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/ones" + op: "Fill" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/ones/packed" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/zeros" + op: "Fill" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/zeros/packed" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/ones" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/zeros" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Const" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT 
+ } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + op: "Mul" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_1" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/add" + op: "AddV2" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel" + } + } 
+ } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + 
} + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + input: "bert/encoder/layer_1/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + input: "bert/encoder/layer_1/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: 
false + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/add" + input: "bert/encoder/layer_1/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_1/intermediate/dense/MatMul" + input: "bert/encoder/layer_1/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_1/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_1/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_1/intermediate/dense/mul/x" + input: "bert/encoder/layer_1/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/add" + op: "AddV2" + input: 
"bert/encoder/layer_1/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_1/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_1/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_1/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_1/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_1/intermediate/dense/add_1/x" + input: "bert/encoder/layer_1/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_1/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_1/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_1/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_1/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor 
{ + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/dense/kernel" + } + } +} 
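+# ---- annotation: not part of the generated GraphDef; added for readability ---- #
+# The mul_1 -> Tanh -> add_1 -> mul_2 -> mul_3 chain above appears to implement BERT's
+# GELU activation via the tanh approximation,
+#   gelu(x) ~ 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3))),
+# with mul_1/x = 0.7978845834732056 ~ sqrt(2/pi); the inner polynomial term is presumably
+# produced by the intermediate/dense mul/add nodes just before this excerpt. The nodes
+# above and below then build the layer_1 output projection kernel of shape [3072, 768],
+# initialized from a truncated normal with stddev ~ 0.02.
+# -------------------------------------------------------------------------------- #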
+node { + name: "bert/encoder/layer_1/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel" + input: "bert/encoder/layer_1/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias" + input: "bert/encoder/layer_1/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_1/intermediate/dense/mul_3" + input: "bert/encoder/layer_1/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_1/output/dense/MatMul" + input: "bert/encoder/layer_1/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_1/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_1/output/dense/BiasAdd" + input: "bert/encoder/layer_1/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_1/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_1/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_1/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_1/output/dropout/GreaterEqual" + input: "bert/encoder/layer_1/output/dropout/Mul" + input: "bert/encoder/layer_1/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/add" + op: "AddV2" + input: "bert/encoder/layer_1/output/dropout/SelectV2" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + 
} + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/layer_normalization_4/beta" + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 
1 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_1/output/layer_normalization_4/Shape" + input: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice/stack" + input: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice/stack_1" + input: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/mul" + op: "Mul" + input: "bert/encoder/layer_1/output/layer_normalization_4/mul/x" + input: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_1/output/layer_normalization_4/Shape" + input: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice_1/stack" + input: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice_1/stack_1" + input: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: 
"end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/mul_1" + op: "Mul" + input: "bert/encoder/layer_1/output/layer_normalization_4/mul_1/x" + input: "bert/encoder/layer_1/output/layer_normalization_4/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_1/output/layer_normalization_4/Reshape/shape/0" + input: "bert/encoder/layer_1/output/layer_normalization_4/mul" + input: "bert/encoder/layer_1/output/layer_normalization_4/mul_1" + input: "bert/encoder/layer_1/output/layer_normalization_4/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/Reshape" + op: "Reshape" + input: "bert/encoder/layer_1/output/add" + input: "bert/encoder/layer_1/output/layer_normalization_4/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/ones/packed" + op: "Pack" + input: "bert/encoder/layer_1/output/layer_normalization_4/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/ones" + op: "Fill" + input: 
"bert/encoder/layer_1/output/layer_normalization_4/ones/packed" + input: "bert/encoder/layer_1/output/layer_normalization_4/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_1/output/layer_normalization_4/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/zeros" + op: "Fill" + input: "bert/encoder/layer_1/output/layer_normalization_4/zeros/packed" + input: "bert/encoder/layer_1/output/layer_normalization_4/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_1/output/layer_normalization_4/Reshape" + input: "bert/encoder/layer_1/output/layer_normalization_4/ones" + input: "bert/encoder/layer_1/output/layer_normalization_4/zeros" + input: "bert/encoder/layer_1/output/layer_normalization_4/Const" + input: "bert/encoder/layer_1/output/layer_normalization_4/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/Reshape_1" + op: "Reshape" + 
input: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3" + input: "bert/encoder/layer_1/output/layer_normalization_4/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/mul_2" + op: "Mul" + input: "bert/encoder/layer_1/output/layer_normalization_4/Reshape_1" + input: "bert/encoder/layer_1/output/layer_normalization_4/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/add" + op: "AddV2" + input: "bert/encoder/layer_1/output/layer_normalization_4/mul_2" + input: "bert/encoder/layer_1/output/layer_normalization_4/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: 
"bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + input: "bert/encoder/layer_2/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} 
+node { + name: "bert/encoder/layer_2/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + input: "bert/encoder/layer_2/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_1/output/layer_normalization_4/add" + input: "bert/encoder/layer_2/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_2/attention/self/query/MatMul" + input: "bert/encoder/layer_2/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + input: "bert/encoder/layer_2/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + input: "bert/encoder/layer_2/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_2/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_1/output/layer_normalization_4/add" + input: "bert/encoder/layer_2/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_2/attention/self/key/MatMul" + input: "bert/encoder/layer_2/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: 
"bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + input: "bert/encoder/layer_2/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} 
+node { + name: "bert/encoder/layer_2/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + input: "bert/encoder/layer_2/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_1/output/layer_normalization_4/add" + input: "bert/encoder/layer_2/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_2/attention/self/value/MatMul" + input: "bert/encoder/layer_2/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_2/attention/self/query/BiasAdd" + input: "bert/encoder/layer_2/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_2/attention/self/Reshape" + input: "bert/encoder/layer_2/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_2/attention/self/key/BiasAdd" + input: "bert/encoder/layer_2/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/transpose_1" + op: 
"Transpose" + input: "bert/encoder/layer_2/attention/self/Reshape_1" + input: "bert/encoder/layer_2/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_2/attention/self/transpose" + input: "bert/encoder/layer_2/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_2/attention/self/MatMul" + input: "bert/encoder/layer_2/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_2/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_2/attention/self/sub/x" + input: "bert/encoder/layer_2/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + 
} + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_2/attention/self/sub" + input: "bert/encoder/layer_2/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_2/attention/self/Mul" + input: "bert/encoder/layer_2/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_2/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_2/attention/self/Softmax" + input: "bert/encoder/layer_2/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_2/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_2/attention/self/dropout/random_uniform/RandomUniform" + input: 
"bert/encoder/layer_2/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_2/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_2/attention/self/dropout/Mul" + input: "bert/encoder/layer_2/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_2/attention/self/value/BiasAdd" + input: "bert/encoder/layer_2/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_2/attention/self/Reshape_2" + input: "bert/encoder/layer_2/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_2/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_2/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } 
+} +node { + name: "bert/encoder/layer_2/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_2/attention/self/MatMul_1" + input: "bert/encoder/layer_2/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_2/attention/self/transpose_3" + input: "bert/encoder/layer_2/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: 
"bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + input: "bert/encoder/layer_2/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/Initializer/zeros" + op: 
"Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + input: "bert/encoder/layer_2/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_2/attention/self/Reshape_3" + input: "bert/encoder/layer_2/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_2/attention/output/dense/MatMul" + input: "bert/encoder/layer_2/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_2/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_2/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_2/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_2/attention/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_2/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_2/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_2/attention/output/dropout/Mul" + input: "bert/encoder/layer_2/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/add" + op: "AddV2" + input: 
"bert/encoder/layer_2/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_1/output/layer_normalization_4/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Shape" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice/stack" + input: 
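Every trainable tensor in this dump follows the resource-variable pattern visible in the gamma/beta nodes above: a VarHandleOp holding the handle, an AssignVariableOp wired to the initializer, a VarIsInitializedOp, and a ReadVariableOp feeding the forward pass. A rough sketch of the kind of TF1-style construction code that produces such node triplets; the scope is shortened and the exact API calls are an assumption about how the graph was built, not taken from the source:

import tensorflow as tf

tf.compat.v1.disable_eager_execution()  # build a graph, as the original dump was

with tf.compat.v1.variable_scope("layer_normalization_5"):
    # Each get_variable call should emit a VarHandleOp, an Assign from the initializer,
    # and ReadVariableOp nodes like the ones above.
    gamma = tf.compat.v1.get_variable(
        "gamma", shape=[768], initializer=tf.ones_initializer(), use_resource=True)
    beta = tf.compat.v1.get_variable(
        "beta", shape=[768], initializer=tf.zeros_initializer(), use_resource=True)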
"bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice/stack_1" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul" + op: "Mul" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul/x" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Shape" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice_1/stack" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice_1/stack_1" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 
+ } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul_1" + op: "Mul" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul_1/x" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape/shape/0" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul_1" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape" + op: "Reshape" + input: "bert/encoder/layer_2/attention/output/add" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/ones/packed" + op: "Pack" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/ones" + 
op: "Fill" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/ones/packed" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/zeros" + op: "Fill" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/zeros/packed" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/ones" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/zeros" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Const" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value 
{ + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + op: "Mul" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_1" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/add" + op: "AddV2" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { 
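Taken together, the output/add node and the layer_normalization_5/* nodes above implement the post-attention residual connection and layer normalization: the [1152, 768] activations (3 x 384 tokens) are reshaped to [1, 1152, 768, 1] so that FusedBatchNormV3 in NCHW layout normalizes each token over the hidden axis with epsilon 1e-3, after which the result is reshaped back, scaled by gamma, and shifted by beta. A short NumPy sketch of the same computation, with illustrative names:

import numpy as np

def layer_norm(x, gamma, beta, eps=1e-3):
    # x: [tokens, hidden]; normalize each row over the hidden axis, then scale and shift,
    # equivalent to the Reshape + FusedBatchNormV3 + mul_2 + add chain above
    mean = x.mean(axis=-1, keepdims=True)
    var = x.var(axis=-1, keepdims=True)
    return (x - mean) / np.sqrt(var + eps) * gamma + beta

# Residual connection followed by normalization, as in output/add + layer_normalization_5:
# hidden = layer_norm(attention_output_dropped + layer_1_output, gamma, beta)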
+ } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + input: "bert/encoder/layer_2/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + input: "bert/encoder/layer_2/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + 
list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/add" + input: "bert/encoder/layer_2/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_2/intermediate/dense/MatMul" + input: "bert/encoder/layer_2/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_2/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_2/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_2/intermediate/dense/mul/x" + input: "bert/encoder/layer_2/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_2/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_2/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + 
tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_2/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_2/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_2/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_2/intermediate/dense/add_1/x" + input: "bert/encoder/layer_2/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_2/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_2/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_2/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_2/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + 
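The Pow/mul/add/Tanh/mul chain above is the tanh approximation of GELU used by BERT, with the constants 0.044715 and 0.7978845834732056 = sqrt(2/pi) read straight from the Const nodes: gelu(x) = 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3))), applied to the [1152, 3072] intermediate activations. A one-function NumPy sketch:

import numpy as np

def gelu(x):
    # Tanh approximation of GELU, matching the Pow(3), mul, add, mul_1, Tanh, add_1, mul_2, mul_3 nodes
    return 0.5 * x * (1.0 + np.tanh(np.sqrt(2.0 / np.pi) * (x + 0.044715 * np.power(x, 3))))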
float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel" + input: "bert/encoder/layer_2/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + 
value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias" + input: "bert/encoder/layer_2/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_2/intermediate/dense/mul_3" + input: "bert/encoder/layer_2/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_2/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_2/output/dense/MatMul" + input: "bert/encoder/layer_2/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_2/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_2/output/dense/BiasAdd" + input: "bert/encoder/layer_2/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_2/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_2/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_2/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_2/output/dropout/GreaterEqual" + input: "bert/encoder/layer_2/output/dropout/Mul" + input: "bert/encoder/layer_2/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } 
+} +node { + name: "bert/encoder/layer_2/output/add" + op: "AddV2" + input: "bert/encoder/layer_2/output/dropout/SelectV2" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } 
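# Above: output/dense MatMul + BiasAdd project the 3072-dim intermediate activations back to 768; the dropout subgraph
# is inverted dropout with rate 0.1 (keep where uniform >= 0.1, kept values scaled by 1/0.9 ~ 1.1111112), and
# output/add adds the residual connection from the attention-output layer normalization.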
+ } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/layer_normalization_6/beta" + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_2/output/layer_normalization_6/Shape" + input: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice/stack" + input: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice/stack_1" + input: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/mul" + op: "Mul" + input: "bert/encoder/layer_2/output/layer_normalization_6/mul/x" + input: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_2/output/layer_normalization_6/Shape" + input: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice_1/stack" + input: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice_1/stack_1" + input: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/mul_1" + 
op: "Mul" + input: "bert/encoder/layer_2/output/layer_normalization_6/mul_1/x" + input: "bert/encoder/layer_2/output/layer_normalization_6/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_2/output/layer_normalization_6/Reshape/shape/0" + input: "bert/encoder/layer_2/output/layer_normalization_6/mul" + input: "bert/encoder/layer_2/output/layer_normalization_6/mul_1" + input: "bert/encoder/layer_2/output/layer_normalization_6/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/Reshape" + op: "Reshape" + input: "bert/encoder/layer_2/output/add" + input: "bert/encoder/layer_2/output/layer_normalization_6/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/ones/packed" + op: "Pack" + input: "bert/encoder/layer_2/output/layer_normalization_6/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/ones" + op: "Fill" + input: "bert/encoder/layer_2/output/layer_normalization_6/ones/packed" + input: "bert/encoder/layer_2/output/layer_normalization_6/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_2/output/layer_normalization_6/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + 
value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/zeros" + op: "Fill" + input: "bert/encoder/layer_2/output/layer_normalization_6/zeros/packed" + input: "bert/encoder/layer_2/output/layer_normalization_6/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_2/output/layer_normalization_6/Reshape" + input: "bert/encoder/layer_2/output/layer_normalization_6/ones" + input: "bert/encoder/layer_2/output/layer_normalization_6/zeros" + input: "bert/encoder/layer_2/output/layer_normalization_6/Const" + input: "bert/encoder/layer_2/output/layer_normalization_6/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3" + input: "bert/encoder/layer_2/output/layer_normalization_6/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/mul_2" + op: "Mul" + input: "bert/encoder/layer_2/output/layer_normalization_6/Reshape_1" + input: "bert/encoder/layer_2/output/layer_normalization_6/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/add" + op: "AddV2" + input: "bert/encoder/layer_2/output/layer_normalization_6/mul_2" + input: "bert/encoder/layer_2/output/layer_normalization_6/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { 
+ i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + input: "bert/encoder/layer_3/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
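# Each weight variable follows the same resource-variable pattern: a TruncatedNormal initializer scaled by stddev 0.02
# (mean 0.0), a VarHandleOp holding the handle, and companion IsInitialized / Assign / ReadVariableOp nodes;
# biases and layer-norm betas are zero-initialized, layer-norm gammas are ones-initialized.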
"loc:@bert/encoder/layer_3/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + input: "bert/encoder/layer_3/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_2/output/layer_normalization_6/add" + input: "bert/encoder/layer_3/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_3/attention/self/query/MatMul" + input: "bert/encoder/layer_3/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + 
} + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + input: "bert/encoder/layer_3/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + input: "bert/encoder/layer_3/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_3/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_2/output/layer_normalization_6/add" + input: "bert/encoder/layer_3/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_3/attention/self/key/MatMul" + input: "bert/encoder/layer_3/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: 
"bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + input: "bert/encoder/layer_3/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_3/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + input: "bert/encoder/layer_3/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_2/output/layer_normalization_6/add" + input: "bert/encoder/layer_3/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_3/attention/self/value/MatMul" + input: "bert/encoder/layer_3/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: 
"\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_3/attention/self/query/BiasAdd" + input: "bert/encoder/layer_3/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_3/attention/self/Reshape" + input: "bert/encoder/layer_3/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_3/attention/self/key/BiasAdd" + input: "bert/encoder/layer_3/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_3/attention/self/Reshape_1" + input: "bert/encoder/layer_3/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_3/attention/self/transpose" + input: 
"bert/encoder/layer_3/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_3/attention/self/MatMul" + input: "bert/encoder/layer_3/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_3/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_3/attention/self/sub/x" + input: "bert/encoder/layer_3/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_3/attention/self/sub" + input: "bert/encoder/layer_3/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_3/attention/self/Mul" 
+ input: "bert/encoder/layer_3/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_3/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_3/attention/self/Softmax" + input: "bert/encoder/layer_3/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_3/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_3/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_3/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + 
tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_3/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_3/attention/self/dropout/Mul" + input: "bert/encoder/layer_3/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_3/attention/self/value/BiasAdd" + input: "bert/encoder/layer_3/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_3/attention/self/Reshape_2" + input: "bert/encoder/layer_3/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_3/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_3/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/transpose_3" + op: "Transpose" + input: 
"bert/encoder/layer_3/attention/self/MatMul_1" + input: "bert/encoder/layer_3/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_3/attention/self/transpose_3" + input: "bert/encoder/layer_3/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: 
"bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + input: "bert/encoder/layer_3/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_3/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + input: "bert/encoder/layer_3/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_3/attention/self/Reshape_3" + input: "bert/encoder/layer_3/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_3/attention/output/dense/MatMul" + input: "bert/encoder/layer_3/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: 
"bert/encoder/layer_3/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_3/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_3/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_3/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_3/attention/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_3/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_3/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_3/attention/output/dropout/Mul" + input: "bert/encoder/layer_3/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_3/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_2/output/layer_normalization_6/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + } 
+ } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" 
+ input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Shape" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice/stack" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice/stack_1" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + 
key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul" + op: "Mul" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul/x" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Shape" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice_1/stack" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice_1/stack_1" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul_1" + op: "Mul" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul_1/x" + input: 
"bert/encoder/layer_3/attention/output/layer_normalization_7/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape/shape/0" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul_1" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape" + op: "Reshape" + input: "bert/encoder/layer_3/attention/output/add" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/ones/packed" + op: "Pack" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/ones" + op: "Fill" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/ones/packed" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/zeros/packed" + op: "Pack" + input: 
"bert/encoder/layer_3/attention/output/layer_normalization_7/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/zeros" + op: "Fill" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/zeros/packed" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/ones" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/zeros" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Const" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list 
{ + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + op: "Mul" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_1" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/add" + op: "AddV2" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" 
+ value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + input: "bert/encoder/layer_3/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + 
attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + input: "bert/encoder/layer_3/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/add" + input: "bert/encoder/layer_3/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } 
+ dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_3/intermediate/dense/MatMul" + input: "bert/encoder/layer_3/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_3/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_3/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_3/intermediate/dense/mul/x" + input: "bert/encoder/layer_3/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_3/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_3/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_3/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_3/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/Tanh" + op: "Tanh" + 
input: "bert/encoder/layer_3/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_3/intermediate/dense/add_1/x" + input: "bert/encoder/layer_3/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_3/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_3/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_3/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_3/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node 
{ + name: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel" + input: "bert/encoder/layer_3/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_3/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias" + input: "bert/encoder/layer_3/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_3/intermediate/dense/mul_3" + input: "bert/encoder/layer_3/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_3/output/dense/MatMul" + input: "bert/encoder/layer_3/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_3/output/dropout/Const" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_3/output/dense/BiasAdd" + input: "bert/encoder/layer_3/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_3/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_3/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_3/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_3/output/dropout/GreaterEqual" + input: "bert/encoder/layer_3/output/dropout/Mul" + input: "bert/encoder/layer_3/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/add" + op: "AddV2" + input: "bert/encoder/layer_3/output/dropout/SelectV2" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + 
value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/layer_normalization_8/beta" + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + 
attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_3/output/layer_normalization_8/Shape" + input: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice/stack" + input: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice/stack_1" + input: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + 
attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/mul" + op: "Mul" + input: "bert/encoder/layer_3/output/layer_normalization_8/mul/x" + input: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_3/output/layer_normalization_8/Shape" + input: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice_1/stack" + input: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice_1/stack_1" + input: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/mul_1" + op: "Mul" + input: "bert/encoder/layer_3/output/layer_normalization_8/mul_1/x" + input: "bert/encoder/layer_3/output/layer_normalization_8/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_3/output/layer_normalization_8/Reshape/shape/0" + input: "bert/encoder/layer_3/output/layer_normalization_8/mul" + input: "bert/encoder/layer_3/output/layer_normalization_8/mul_1" + input: "bert/encoder/layer_3/output/layer_normalization_8/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/Reshape" + op: "Reshape" + input: "bert/encoder/layer_3/output/add" + input: "bert/encoder/layer_3/output/layer_normalization_8/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/ones/packed" + op: "Pack" + input: "bert/encoder/layer_3/output/layer_normalization_8/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/ones" + op: "Fill" + input: "bert/encoder/layer_3/output/layer_normalization_8/ones/packed" + input: "bert/encoder/layer_3/output/layer_normalization_8/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_3/output/layer_normalization_8/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + 
float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/zeros" + op: "Fill" + input: "bert/encoder/layer_3/output/layer_normalization_8/zeros/packed" + input: "bert/encoder/layer_3/output/layer_normalization_8/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_3/output/layer_normalization_8/Reshape" + input: "bert/encoder/layer_3/output/layer_normalization_8/ones" + input: "bert/encoder/layer_3/output/layer_normalization_8/zeros" + input: "bert/encoder/layer_3/output/layer_normalization_8/Const" + input: "bert/encoder/layer_3/output/layer_normalization_8/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3" + input: "bert/encoder/layer_3/output/layer_normalization_8/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/mul_2" + op: "Mul" + input: "bert/encoder/layer_3/output/layer_normalization_8/Reshape_1" + input: "bert/encoder/layer_3/output/layer_normalization_8/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/add" + op: "AddV2" + input: "bert/encoder/layer_3/output/layer_normalization_8/mul_2" + input: "bert/encoder/layer_3/output/layer_normalization_8/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel" + } + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + input: "bert/encoder/layer_4/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/query/bias" + } + } +} +node 
{ + name: "bert/encoder/layer_4/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + input: "bert/encoder/layer_4/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_3/output/layer_normalization_8/add" + input: "bert/encoder/layer_4/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_4/attention/self/query/MatMul" + input: "bert/encoder/layer_4/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + 
key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_4/attention/self/key/kernel" + input: "bert/encoder/layer_4/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + input: "bert/encoder/layer_4/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_3/output/layer_normalization_8/add" + input: "bert/encoder/layer_4/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + 
} + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_4/attention/self/key/MatMul" + input: "bert/encoder/layer_4/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + input: "bert/encoder/layer_4/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/value/bias" + } + } +} +node { + name: 
"bert/encoder/layer_4/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + input: "bert/encoder/layer_4/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_3/output/layer_normalization_8/add" + input: "bert/encoder/layer_4/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_4/attention/self/value/MatMul" + input: "bert/encoder/layer_4/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_4/attention/self/query/BiasAdd" + input: "bert/encoder/layer_4/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_4/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_4/attention/self/Reshape" + input: "bert/encoder/layer_4/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_4/attention/self/key/BiasAdd" + input: "bert/encoder/layer_4/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_4/attention/self/Reshape_1" + input: "bert/encoder/layer_4/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_4/attention/self/transpose" + input: "bert/encoder/layer_4/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + 
key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_4/attention/self/MatMul" + input: "bert/encoder/layer_4/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_4/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_4/attention/self/sub/x" + input: "bert/encoder/layer_4/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_4/attention/self/sub" + input: "bert/encoder/layer_4/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_4/attention/self/Mul" + input: "bert/encoder/layer_4/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_4/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + 
size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_4/attention/self/Softmax" + input: "bert/encoder/layer_4/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_4/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_4/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_4/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_4/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_4/attention/self/dropout/Mul" + input: "bert/encoder/layer_4/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_4/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_4/attention/self/value/BiasAdd" + input: "bert/encoder/layer_4/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_4/attention/self/Reshape_2" + input: "bert/encoder/layer_4/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_4/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_4/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_4/attention/self/MatMul_1" + input: "bert/encoder/layer_4/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" 
+ value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_4/attention/self/transpose_3" + input: "bert/encoder/layer_4/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + input: "bert/encoder/layer_4/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + input: "bert/encoder/layer_4/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_4/attention/self/Reshape_3" + input: "bert/encoder/layer_4/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_4/attention/output/dense/MatMul" + input: "bert/encoder/layer_4/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_4/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_4/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_4/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_4/attention/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_4/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_4/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_4/attention/output/dropout/Mul" + input: "bert/encoder/layer_4/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_4/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_3/output/layer_normalization_8/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } 
+} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Shape" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice/stack" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice/stack_1" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul" + op: "Mul" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul/x" + input: 
"bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Shape" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice_1/stack" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice_1/stack_1" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul_1" + op: "Mul" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul_1/x" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: 
"bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape/shape/0" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul_1" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape" + op: "Reshape" + input: "bert/encoder/layer_4/attention/output/add" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/ones/packed" + op: "Pack" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/ones" + op: "Fill" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/ones/packed" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: 
DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/zeros" + op: "Fill" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/zeros/packed" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/ones" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/zeros" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Const" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + op: "Mul" + input: 
"bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_1" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/add" + op: "AddV2" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: 
"bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + input: "bert/encoder/layer_4/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + input: "bert/encoder/layer_4/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/add" + input: "bert/encoder/layer_4/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + 
name: "bert/encoder/layer_4/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_4/intermediate/dense/MatMul" + input: "bert/encoder/layer_4/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_4/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_4/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_4/intermediate/dense/mul/x" + input: "bert/encoder/layer_4/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_4/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_4/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_4/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_4/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_4/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_4/intermediate/dense/add_1/x" + input: "bert/encoder/layer_4/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_4/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_4/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_4/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_4/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + 
} + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel" + input: "bert/encoder/layer_4/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_4/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias" + input: "bert/encoder/layer_4/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_4/intermediate/dense/mul_3" + input: "bert/encoder/layer_4/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_4/output/dense/MatMul" + input: "bert/encoder/layer_4/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_4/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_4/output/dense/BiasAdd" + input: "bert/encoder/layer_4/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_4/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_4/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_4/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_4/output/dropout/GreaterEqual" + input: "bert/encoder/layer_4/output/dropout/Mul" + input: "bert/encoder/layer_4/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/add" + op: "AddV2" + input: "bert/encoder/layer_4/output/dropout/SelectV2" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + op: 
"VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/layer_normalization_10/beta" + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_4/output/layer_normalization_10/Shape" + input: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice/stack" + input: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice/stack_1" + input: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/mul" + op: "Mul" + input: "bert/encoder/layer_4/output/layer_normalization_10/mul/x" + input: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 
+ } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_4/output/layer_normalization_10/Shape" + input: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice_1/stack" + input: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice_1/stack_1" + input: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/mul_1" + op: "Mul" + input: "bert/encoder/layer_4/output/layer_normalization_10/mul_1/x" + input: "bert/encoder/layer_4/output/layer_normalization_10/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + 
tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_4/output/layer_normalization_10/Reshape/shape/0" + input: "bert/encoder/layer_4/output/layer_normalization_10/mul" + input: "bert/encoder/layer_4/output/layer_normalization_10/mul_1" + input: "bert/encoder/layer_4/output/layer_normalization_10/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/Reshape" + op: "Reshape" + input: "bert/encoder/layer_4/output/add" + input: "bert/encoder/layer_4/output/layer_normalization_10/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/ones/packed" + op: "Pack" + input: "bert/encoder/layer_4/output/layer_normalization_10/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/ones" + op: "Fill" + input: "bert/encoder/layer_4/output/layer_normalization_10/ones/packed" + input: "bert/encoder/layer_4/output/layer_normalization_10/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_4/output/layer_normalization_10/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/zeros" + op: "Fill" + input: "bert/encoder/layer_4/output/layer_normalization_10/zeros/packed" + input: "bert/encoder/layer_4/output/layer_normalization_10/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + 
} + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_4/output/layer_normalization_10/Reshape" + input: "bert/encoder/layer_4/output/layer_normalization_10/ones" + input: "bert/encoder/layer_4/output/layer_normalization_10/zeros" + input: "bert/encoder/layer_4/output/layer_normalization_10/Const" + input: "bert/encoder/layer_4/output/layer_normalization_10/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3" + input: "bert/encoder/layer_4/output/layer_normalization_10/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/mul_2" + op: "Mul" + input: "bert/encoder/layer_4/output/layer_normalization_10/Reshape_1" + input: "bert/encoder/layer_4/output/layer_normalization_10/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } 
+} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/add" + op: "AddV2" + input: "bert/encoder/layer_4/output/layer_normalization_10/mul_2" + input: "bert/encoder/layer_4/output/layer_normalization_10/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: 
"bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + input: "bert/encoder/layer_5/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/Assign" + op: 
"AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + input: "bert/encoder/layer_5/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_4/output/layer_normalization_10/add" + input: "bert/encoder/layer_5/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_5/attention/self/query/MatMul" + input: "bert/encoder/layer_5/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel" + } + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + input: "bert/encoder/layer_5/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + 
input: "bert/encoder/layer_5/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + input: "bert/encoder/layer_5/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_4/output/layer_normalization_10/add" + input: "bert/encoder/layer_5/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_5/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_5/attention/self/key/MatMul" + input: "bert/encoder/layer_5/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: 
"bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + input: "bert/encoder/layer_5/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/Assign" + op: 
"AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + input: "bert/encoder/layer_5/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_4/output/layer_normalization_10/add" + input: "bert/encoder/layer_5/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_5/attention/self/value/MatMul" + input: "bert/encoder/layer_5/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_5/attention/self/query/BiasAdd" + input: "bert/encoder/layer_5/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } 
+ } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_5/attention/self/Reshape" + input: "bert/encoder/layer_5/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_5/attention/self/key/BiasAdd" + input: "bert/encoder/layer_5/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_5/attention/self/Reshape_1" + input: "bert/encoder/layer_5/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_5/attention/self/transpose" + input: "bert/encoder/layer_5/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_5/attention/self/MatMul" + input: "bert/encoder/layer_5/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT 
+ } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_5/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_5/attention/self/sub/x" + input: "bert/encoder/layer_5/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_5/attention/self/sub" + input: "bert/encoder/layer_5/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_5/attention/self/Mul" + input: "bert/encoder/layer_5/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_5/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape 
{ + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_5/attention/self/Softmax" + input: "bert/encoder/layer_5/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_5/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_5/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_5/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_5/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_5/attention/self/dropout/Mul" + input: "bert/encoder/layer_5/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: 
"\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_5/attention/self/value/BiasAdd" + input: "bert/encoder/layer_5/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_5/attention/self/Reshape_2" + input: "bert/encoder/layer_5/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_5/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_5/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_5/attention/self/MatMul_1" + input: "bert/encoder/layer_5/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_5/attention/self/transpose_3" + 
input: "bert/encoder/layer_5/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr 
{ + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + input: "bert/encoder/layer_5/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + input: 
"bert/encoder/layer_5/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_5/attention/self/Reshape_3" + input: "bert/encoder/layer_5/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_5/attention/output/dense/MatMul" + input: "bert/encoder/layer_5/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_5/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_5/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_5/attention/output/dropout/Shape" + attr { 
+ key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_5/attention/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_5/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_5/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_5/attention/output/dropout/Mul" + input: "bert/encoder/layer_5/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_5/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_4/output/layer_normalization_10/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: 
"bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + attr { + key: "_output_shapes" + value { + list { + shape { 
+ dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Shape" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice/stack" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice/stack_1" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul" + op: "Mul" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul/x" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice_1/stack" + op: 
"Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Shape" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice_1/stack" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice_1/stack_1" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul_1" + op: "Mul" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul_1/x" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + 
name: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape/shape/0" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul_1" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape" + op: "Reshape" + input: "bert/encoder/layer_5/attention/output/add" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/ones/packed" + op: "Pack" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/ones" + op: "Fill" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/ones/packed" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/zeros" + op: "Fill" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/zeros/packed" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/zeros/Const" + attr { + 
key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/ones" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/zeros" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Const" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + op: "Mul" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_1" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_5/attention/output/layer_normalization_11/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/add" + op: "AddV2" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + input: "bert/encoder/layer_5/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_5/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + input: "bert/encoder/layer_5/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/add" + input: "bert/encoder/layer_5/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_5/intermediate/dense/MatMul" + input: "bert/encoder/layer_5/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + 
size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_5/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_5/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_5/intermediate/dense/mul/x" + input: "bert/encoder/layer_5/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_5/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_5/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_5/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_5/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_5/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_5/intermediate/dense/add_1/x" + input: "bert/encoder/layer_5/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" 
+ value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_5/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_5/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_5/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_5/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: 
"bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel" + input: "bert/encoder/layer_5/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias" + input: "bert/encoder/layer_5/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_5/intermediate/dense/mul_3" + input: "bert/encoder/layer_5/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_5/output/dense/MatMul" + input: "bert/encoder/layer_5/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_5/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_5/output/dense/BiasAdd" + input: "bert/encoder/layer_5/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + 
} + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_5/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_5/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_5/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_5/output/dropout/GreaterEqual" + input: "bert/encoder/layer_5/output/dropout/Mul" + input: "bert/encoder/layer_5/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/add" + op: "AddV2" + input: "bert/encoder/layer_5/output/dropout/SelectV2" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: 
"container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/layer_normalization_12/beta" + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_5/output/layer_normalization_12/Shape" + input: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice/stack" + input: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice/stack_1" + input: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/mul" + op: "Mul" + input: "bert/encoder/layer_5/output/layer_normalization_12/mul/x" + input: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr 
{ + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_5/output/layer_normalization_12/Shape" + input: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice_1/stack" + input: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice_1/stack_1" + input: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/mul_1" + op: "Mul" + input: "bert/encoder/layer_5/output/layer_normalization_12/mul_1/x" + input: "bert/encoder/layer_5/output/layer_normalization_12/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_5/output/layer_normalization_12/Reshape/shape/0" + input: "bert/encoder/layer_5/output/layer_normalization_12/mul" + input: 
"bert/encoder/layer_5/output/layer_normalization_12/mul_1" + input: "bert/encoder/layer_5/output/layer_normalization_12/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/Reshape" + op: "Reshape" + input: "bert/encoder/layer_5/output/add" + input: "bert/encoder/layer_5/output/layer_normalization_12/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/ones/packed" + op: "Pack" + input: "bert/encoder/layer_5/output/layer_normalization_12/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/ones" + op: "Fill" + input: "bert/encoder/layer_5/output/layer_normalization_12/ones/packed" + input: "bert/encoder/layer_5/output/layer_normalization_12/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_5/output/layer_normalization_12/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/zeros" + op: "Fill" + input: "bert/encoder/layer_5/output/layer_normalization_12/zeros/packed" + input: "bert/encoder/layer_5/output/layer_normalization_12/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_5/output/layer_normalization_12/Reshape" + input: "bert/encoder/layer_5/output/layer_normalization_12/ones" + input: "bert/encoder/layer_5/output/layer_normalization_12/zeros" + input: "bert/encoder/layer_5/output/layer_normalization_12/Const" + input: "bert/encoder/layer_5/output/layer_normalization_12/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3" + input: "bert/encoder/layer_5/output/layer_normalization_12/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/mul_2" + op: "Mul" + input: "bert/encoder/layer_5/output/layer_normalization_12/Reshape_1" + input: "bert/encoder/layer_5/output/layer_normalization_12/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/add" + op: "AddV2" + input: "bert/encoder/layer_5/output/layer_normalization_12/mul_2" + input: "bert/encoder/layer_5/output/layer_normalization_12/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + 
} + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + input: "bert/encoder/layer_6/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + input: "bert/encoder/layer_6/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_6/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_5/output/layer_normalization_12/add" + input: "bert/encoder/layer_6/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_6/attention/self/query/MatMul" + input: "bert/encoder/layer_6/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: 
"bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + input: "bert/encoder/layer_6/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_6/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + input: "bert/encoder/layer_6/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_5/output/layer_normalization_12/add" + input: "bert/encoder/layer_6/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_6/attention/self/key/MatMul" + input: "bert/encoder/layer_6/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + input: "bert/encoder/layer_6/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + input: "bert/encoder/layer_6/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_6/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_5/output/layer_normalization_12/add" + input: "bert/encoder/layer_6/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_6/attention/self/value/MatMul" + input: "bert/encoder/layer_6/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_6/attention/self/query/BiasAdd" + input: "bert/encoder/layer_6/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_6/attention/self/Reshape" + input: 
"bert/encoder/layer_6/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_6/attention/self/key/BiasAdd" + input: "bert/encoder/layer_6/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_6/attention/self/Reshape_1" + input: "bert/encoder/layer_6/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_6/attention/self/transpose" + input: "bert/encoder/layer_6/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_6/attention/self/MatMul" + input: "bert/encoder/layer_6/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/ExpandDims/dim" 
+ op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_6/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_6/attention/self/sub/x" + input: "bert/encoder/layer_6/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_6/attention/self/sub" + input: "bert/encoder/layer_6/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_6/attention/self/Mul" + input: "bert/encoder/layer_6/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_6/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_6/attention/self/Softmax" + input: "bert/encoder/layer_6/attention/self/dropout/Const" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_6/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_6/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_6/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_6/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_6/attention/self/dropout/Mul" + input: "bert/encoder/layer_6/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_6/attention/self/value/BiasAdd" + input: 
"bert/encoder/layer_6/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_6/attention/self/Reshape_2" + input: "bert/encoder/layer_6/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_6/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_6/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_6/attention/self/MatMul_1" + input: "bert/encoder/layer_6/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_6/attention/self/transpose_3" + input: "bert/encoder/layer_6/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } 
+ } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + input: "bert/encoder/layer_6/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + input: "bert/encoder/layer_6/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_6/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_6/attention/self/Reshape_3" + input: "bert/encoder/layer_6/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_6/attention/output/dense/MatMul" + input: "bert/encoder/layer_6/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_6/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_6/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_6/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_6/attention/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_6/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_6/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_6/attention/output/dropout/Mul" + input: "bert/encoder/layer_6/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_6/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_5/output/layer_normalization_12/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/Shape" + op: "Const" + attr { + key: "_output_shapes" 
+ value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Shape" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice/stack" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice/stack_1" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul" + op: "Mul" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul/x" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + 
tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Shape" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice_1/stack" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice_1/stack_1" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul_1" + op: "Mul" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul_1/x" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape/shape/0" + input: 
"bert/encoder/layer_6/attention/output/layer_normalization_13/mul" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul_1" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape" + op: "Reshape" + input: "bert/encoder/layer_6/attention/output/add" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/ones/packed" + op: "Pack" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/ones" + op: "Fill" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/ones/packed" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/zeros" + op: "Fill" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/zeros/packed" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: 
DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/ones" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/zeros" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Const" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + op: "Mul" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_1" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/add" + op: "AddV2" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: 
"bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + input: "bert/encoder/layer_6/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + input: "bert/encoder/layer_6/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/add" + input: "bert/encoder/layer_6/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_6/intermediate/dense/MatMul" + input: "bert/encoder/layer_6/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } 
+ attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_6/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_6/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_6/intermediate/dense/mul/x" + input: "bert/encoder/layer_6/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_6/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_6/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_6/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_6/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_6/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_6/intermediate/dense/add_1/x" + input: "bert/encoder/layer_6/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + 
} + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_6/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_6/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_6/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_6/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel" + } 
+ } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel" + input: "bert/encoder/layer_6/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_6/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias" + input: "bert/encoder/layer_6/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_6/intermediate/dense/mul_3" + input: "bert/encoder/layer_6/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_6/output/dense/MatMul" + input: "bert/encoder/layer_6/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_6/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_6/output/dense/BiasAdd" + input: "bert/encoder/layer_6/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dropout/random_uniform/RandomUniform" + 
op: "RandomUniform" + input: "bert/encoder/layer_6/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_6/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_6/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_6/output/dropout/GreaterEqual" + input: "bert/encoder/layer_6/output/dropout/Mul" + input: "bert/encoder/layer_6/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/add" + op: "AddV2" + input: "bert/encoder/layer_6/output/dropout/SelectV2" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + } + } +} +node { + name: 
"bert/encoder/layer_6/output/layer_normalization_14/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/layer_normalization_14/beta" + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + 
} + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_6/output/layer_normalization_14/Shape" + input: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice/stack" + input: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice/stack_1" + input: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/mul" + op: "Mul" + input: "bert/encoder/layer_6/output/layer_normalization_14/mul/x" + input: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice_1/stack_1" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_6/output/layer_normalization_14/Shape" + input: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice_1/stack" + input: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice_1/stack_1" + input: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/mul_1" + op: "Mul" + input: "bert/encoder/layer_6/output/layer_normalization_14/mul_1/x" + input: "bert/encoder/layer_6/output/layer_normalization_14/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_6/output/layer_normalization_14/Reshape/shape/0" + input: "bert/encoder/layer_6/output/layer_normalization_14/mul" + input: "bert/encoder/layer_6/output/layer_normalization_14/mul_1" + input: "bert/encoder/layer_6/output/layer_normalization_14/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 
+ } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/Reshape" + op: "Reshape" + input: "bert/encoder/layer_6/output/add" + input: "bert/encoder/layer_6/output/layer_normalization_14/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/ones/packed" + op: "Pack" + input: "bert/encoder/layer_6/output/layer_normalization_14/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/ones" + op: "Fill" + input: "bert/encoder/layer_6/output/layer_normalization_14/ones/packed" + input: "bert/encoder/layer_6/output/layer_normalization_14/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_6/output/layer_normalization_14/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/zeros" + op: "Fill" + input: "bert/encoder/layer_6/output/layer_normalization_14/zeros/packed" + input: "bert/encoder/layer_6/output/layer_normalization_14/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_6/output/layer_normalization_14/Reshape" + input: "bert/encoder/layer_6/output/layer_normalization_14/ones" + input: "bert/encoder/layer_6/output/layer_normalization_14/zeros" + input: "bert/encoder/layer_6/output/layer_normalization_14/Const" + input: "bert/encoder/layer_6/output/layer_normalization_14/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3" + input: "bert/encoder/layer_6/output/layer_normalization_14/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/mul_2" + op: "Mul" + input: "bert/encoder/layer_6/output/layer_normalization_14/Reshape_1" + input: "bert/encoder/layer_6/output/layer_normalization_14/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/add" + op: "AddV2" + input: "bert/encoder/layer_6/output/layer_normalization_14/mul_2" + input: "bert/encoder/layer_6/output/layer_normalization_14/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_7/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + input: "bert/encoder/layer_7/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + input: "bert/encoder/layer_7/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_7/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_6/output/layer_normalization_14/add" + input: "bert/encoder/layer_7/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_7/attention/self/query/MatMul" + input: "bert/encoder/layer_7/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_7/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + input: "bert/encoder/layer_7/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + input: "bert/encoder/layer_7/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_6/output/layer_normalization_14/add" + input: "bert/encoder/layer_7/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_7/attention/self/key/MatMul" + input: "bert/encoder/layer_7/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_7/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + input: "bert/encoder/layer_7/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + input: "bert/encoder/layer_7/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_7/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_6/output/layer_normalization_14/add" + input: "bert/encoder/layer_7/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_7/attention/self/value/MatMul" + input: "bert/encoder/layer_7/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_7/attention/self/query/BiasAdd" + input: "bert/encoder/layer_7/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_7/attention/self/Reshape" + input: "bert/encoder/layer_7/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_7/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_7/attention/self/key/BiasAdd" + input: "bert/encoder/layer_7/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_7/attention/self/Reshape_1" + input: "bert/encoder/layer_7/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_7/attention/self/transpose" + input: "bert/encoder/layer_7/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_7/attention/self/MatMul" + input: "bert/encoder/layer_7/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: 
"bert/encoder/layer_7/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_7/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_7/attention/self/sub/x" + input: "bert/encoder/layer_7/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_7/attention/self/sub" + input: "bert/encoder/layer_7/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_7/attention/self/Mul" + input: "bert/encoder/layer_7/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_7/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_7/attention/self/Softmax" + input: "bert/encoder/layer_7/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/dropout/Shape" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_7/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_7/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_7/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_7/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_7/attention/self/dropout/Mul" + input: "bert/encoder/layer_7/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_7/attention/self/value/BiasAdd" + input: "bert/encoder/layer_7/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_7/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_7/attention/self/Reshape_2" + input: "bert/encoder/layer_7/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_7/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_7/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_7/attention/self/MatMul_1" + input: "bert/encoder/layer_7/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_7/attention/self/transpose_3" + input: "bert/encoder/layer_7/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + 
list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } 
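The kernel initializer chain above (TruncatedNormal, Mul by the 0.02 stddev, AddV2 with the zero mean) is a truncated-normal init for the [768, 768] output projection. A rough NumPy stand-in, assuming the usual two-sigma truncation performed by the TruncatedNormal op:

import numpy as np

def truncated_normal(shape, mean=0.0, stddev=0.02, seed=0):
    # Rejection-sample a standard normal clipped to +/- 2 sigma, then scale and
    # shift, mirroring the TruncatedNormal -> Mul(stddev) -> AddV2(mean) nodes.
    rng = np.random.default_rng(seed)
    out = rng.standard_normal(shape)
    bad = np.abs(out) > 2.0
    while bad.any():
        out[bad] = rng.standard_normal(int(bad.sum()))
        bad = np.abs(out) > 2.0
    return (out * stddev + mean).astype(np.float32)

w = truncated_normal((768, 768))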
+ } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + input: "bert/encoder/layer_7/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + input: "bert/encoder/layer_7/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/MatMul/ReadVariableOp" + 
op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_7/attention/self/Reshape_3" + input: "bert/encoder/layer_7/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_7/attention/output/dense/MatMul" + input: "bert/encoder/layer_7/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_7/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_7/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_7/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + 
dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_7/attention/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_7/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_7/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_7/attention/output/dropout/Mul" + input: "bert/encoder/layer_7/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_7/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_6/output/layer_normalization_14/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + input: 
"bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: 
"bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Shape" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice/stack" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice/stack_1" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul" + op: "Mul" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul/x" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { 
+ type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Shape" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice_1/stack" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice_1/stack_1" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul_1" + op: "Mul" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul_1/x" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape/shape/0" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul_1" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape" + op: "Reshape" + input: "bert/encoder/layer_7/attention/output/add" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/ones/packed" + op: "Pack" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/ones" + op: "Fill" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/ones/packed" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/zeros" + op: "Fill" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/zeros/packed" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + 
dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/ones" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/zeros" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Const" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + op: "Mul" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_1" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/add" + op: "AddV2" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + input: 
"bert/encoder/layer_7/attention/output/layer_normalization_15/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel" + } + 
} + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + input: "bert/encoder/layer_7/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_7/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + input: "bert/encoder/layer_7/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/add" + input: "bert/encoder/layer_7/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_7/intermediate/dense/MatMul" + input: "bert/encoder/layer_7/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/Pow" + op: "Pow" + input: 
"bert/encoder/layer_7/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_7/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_7/intermediate/dense/mul/x" + input: "bert/encoder/layer_7/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_7/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_7/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_7/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_7/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_7/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_7/intermediate/dense/add_1/x" + input: "bert/encoder/layer_7/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/mul_2" + op: "Mul" + 
input: "bert/encoder/layer_7/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_7/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_7/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_7/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: 
"bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel" + input: "bert/encoder/layer_7/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias" + 
input: "bert/encoder/layer_7/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_7/intermediate/dense/mul_3" + input: "bert/encoder/layer_7/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_7/output/dense/MatMul" + input: "bert/encoder/layer_7/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_7/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_7/output/dense/BiasAdd" + input: "bert/encoder/layer_7/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_7/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { 
+ key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_7/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_7/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_7/output/dropout/GreaterEqual" + input: "bert/encoder/layer_7/output/dropout/Mul" + input: "bert/encoder/layer_7/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/add" + op: "AddV2" + input: "bert/encoder/layer_7/output/dropout/SelectV2" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + 
name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/layer_normalization_16/beta" + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: 
"bert/encoder/layer_7/output/layer_normalization_16/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_7/output/layer_normalization_16/Shape" + input: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice/stack" + input: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice/stack_1" + input: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/mul" + op: "Mul" + input: "bert/encoder/layer_7/output/layer_normalization_16/mul/x" + input: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + 
int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_7/output/layer_normalization_16/Shape" + input: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice_1/stack" + input: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice_1/stack_1" + input: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/mul_1" + op: "Mul" + input: "bert/encoder/layer_7/output/layer_normalization_16/mul_1/x" + input: "bert/encoder/layer_7/output/layer_normalization_16/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_7/output/layer_normalization_16/Reshape/shape/0" + input: "bert/encoder/layer_7/output/layer_normalization_16/mul" + input: "bert/encoder/layer_7/output/layer_normalization_16/mul_1" + input: "bert/encoder/layer_7/output/layer_normalization_16/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/Reshape" + op: "Reshape" + input: "bert/encoder/layer_7/output/add" + input: 
"bert/encoder/layer_7/output/layer_normalization_16/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/ones/packed" + op: "Pack" + input: "bert/encoder/layer_7/output/layer_normalization_16/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/ones" + op: "Fill" + input: "bert/encoder/layer_7/output/layer_normalization_16/ones/packed" + input: "bert/encoder/layer_7/output/layer_normalization_16/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_7/output/layer_normalization_16/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/zeros" + op: "Fill" + input: "bert/encoder/layer_7/output/layer_normalization_16/zeros/packed" + input: "bert/encoder/layer_7/output/layer_normalization_16/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_7/output/layer_normalization_16/Reshape" + input: "bert/encoder/layer_7/output/layer_normalization_16/ones" + input: "bert/encoder/layer_7/output/layer_normalization_16/zeros" + input: "bert/encoder/layer_7/output/layer_normalization_16/Const" + input: "bert/encoder/layer_7/output/layer_normalization_16/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3" + input: "bert/encoder/layer_7/output/layer_normalization_16/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/mul_2" + op: "Mul" + input: "bert/encoder/layer_7/output/layer_normalization_16/Reshape_1" + input: "bert/encoder/layer_7/output/layer_normalization_16/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/add" + op: "AddV2" + input: "bert/encoder/layer_7/output/layer_normalization_16/mul_2" + input: "bert/encoder/layer_7/output/layer_normalization_16/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + 
} + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + input: "bert/encoder/layer_8/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + input: "bert/encoder/layer_8/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_7/output/layer_normalization_16/add" + input: "bert/encoder/layer_8/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_8/attention/self/query/MatMul" + input: "bert/encoder/layer_8/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + 
} + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + input: "bert/encoder/layer_8/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias" + op: "VarHandleOp" + attr { + 
key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + input: "bert/encoder/layer_8/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_7/output/layer_normalization_16/add" + input: "bert/encoder/layer_8/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_8/attention/self/key/MatMul" + input: "bert/encoder/layer_8/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value 
{ + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 
+ } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + input: "bert/encoder/layer_8/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + input: "bert/encoder/layer_8/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_7/output/layer_normalization_16/add" + input: "bert/encoder/layer_8/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_8/attention/self/value/MatMul" + input: "bert/encoder/layer_8/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_8/attention/self/query/BiasAdd" + input: "bert/encoder/layer_8/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_8/attention/self/Reshape" + input: "bert/encoder/layer_8/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { 
+ dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_8/attention/self/key/BiasAdd" + input: "bert/encoder/layer_8/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_8/attention/self/Reshape_1" + input: "bert/encoder/layer_8/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_8/attention/self/transpose" + input: "bert/encoder/layer_8/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_8/attention/self/MatMul" + input: "bert/encoder/layer_8/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_8/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim 
{ + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_8/attention/self/sub/x" + input: "bert/encoder/layer_8/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_8/attention/self/sub" + input: "bert/encoder/layer_8/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_8/attention/self/Mul" + input: "bert/encoder/layer_8/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_8/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_8/attention/self/Softmax" + input: "bert/encoder/layer_8/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + 
} + } +} +node { + name: "bert/encoder/layer_8/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_8/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_8/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_8/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_8/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_8/attention/self/dropout/Mul" + input: "bert/encoder/layer_8/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_8/attention/self/value/BiasAdd" + input: "bert/encoder/layer_8/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + 
tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_8/attention/self/Reshape_2" + input: "bert/encoder/layer_8/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_8/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_8/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_8/attention/self/MatMul_1" + input: "bert/encoder/layer_8/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_8/attention/self/transpose_3" + input: "bert/encoder/layer_8/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: 
"bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: 
"bert/encoder/layer_8/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + input: "bert/encoder/layer_8/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + input: "bert/encoder/layer_8/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + 
} +} +node { + name: "bert/encoder/layer_8/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_8/attention/self/Reshape_3" + input: "bert/encoder/layer_8/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_8/attention/output/dense/MatMul" + input: "bert/encoder/layer_8/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_8/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_8/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_8/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_8/attention/output/dropout/random_uniform/RandomUniform" 
+ input: "bert/encoder/layer_8/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_8/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_8/attention/output/dropout/Mul" + input: "bert/encoder/layer_8/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_8/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_7/output/layer_normalization_16/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim 
{ + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Shape" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice/stack" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice/stack_1" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul" + op: "Mul" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul/x" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Shape" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice_1/stack" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice_1/stack_1" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul_1" + op: "Mul" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul_1/x" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape/shape/0" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul_1" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape" + op: "Reshape" + input: "bert/encoder/layer_8/attention/output/add" + input: 
"bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/ones/packed" + op: "Pack" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/ones" + op: "Fill" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/ones/packed" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/zeros" + op: "Fill" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/zeros/packed" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/ones" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/zeros" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Const" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + op: "Mul" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_1" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/add" + op: "AddV2" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_8/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + input: "bert/encoder/layer_8/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } 
+ } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + input: "bert/encoder/layer_8/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/add" + input: "bert/encoder/layer_8/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_8/intermediate/dense/MatMul" + input: "bert/encoder/layer_8/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_8/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_8/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_8/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_8/intermediate/dense/mul/x" + input: "bert/encoder/layer_8/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_8/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_8/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_8/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_8/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_8/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_8/intermediate/dense/add_1/x" + input: "bert/encoder/layer_8/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_8/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_8/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { 
+ name: "bert/encoder/layer_8/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_8/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_8/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel" + } 
+ } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel" + input: "bert/encoder/layer_8/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias" + input: "bert/encoder/layer_8/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_8/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_8/intermediate/dense/mul_3" + input: "bert/encoder/layer_8/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_8/output/dense/MatMul" + input: "bert/encoder/layer_8/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_8/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_8/output/dense/BiasAdd" + input: "bert/encoder/layer_8/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_8/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } 
+ } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_8/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_8/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_8/output/dropout/GreaterEqual" + input: "bert/encoder/layer_8/output/dropout/Mul" + input: "bert/encoder/layer_8/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/add" + op: "AddV2" + input: "bert/encoder/layer_8/output/dropout/SelectV2" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + 
value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/layer_normalization_18/beta" + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { 
+ dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_8/output/layer_normalization_18/Shape" + input: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice/stack" + input: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice/stack_1" + input: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/mul" + op: "Mul" + input: "bert/encoder/layer_8/output/layer_normalization_18/mul/x" + input: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_8/output/layer_normalization_18/Shape" + input: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice_1/stack" + input: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice_1/stack_1" + input: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/mul_1" + op: "Mul" + input: "bert/encoder/layer_8/output/layer_normalization_18/mul_1/x" + input: "bert/encoder/layer_8/output/layer_normalization_18/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_8/output/layer_normalization_18/Reshape/shape/0" + input: "bert/encoder/layer_8/output/layer_normalization_18/mul" + input: "bert/encoder/layer_8/output/layer_normalization_18/mul_1" + input: "bert/encoder/layer_8/output/layer_normalization_18/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/Reshape" + op: "Reshape" + input: "bert/encoder/layer_8/output/add" + input: "bert/encoder/layer_8/output/layer_normalization_18/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node 
{ + name: "bert/encoder/layer_8/output/layer_normalization_18/ones/packed" + op: "Pack" + input: "bert/encoder/layer_8/output/layer_normalization_18/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/ones" + op: "Fill" + input: "bert/encoder/layer_8/output/layer_normalization_18/ones/packed" + input: "bert/encoder/layer_8/output/layer_normalization_18/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_8/output/layer_normalization_18/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/zeros" + op: "Fill" + input: "bert/encoder/layer_8/output/layer_normalization_18/zeros/packed" + input: "bert/encoder/layer_8/output/layer_normalization_18/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_8/output/layer_normalization_18/Reshape" + input: "bert/encoder/layer_8/output/layer_normalization_18/ones" + input: "bert/encoder/layer_8/output/layer_normalization_18/zeros" + input: "bert/encoder/layer_8/output/layer_normalization_18/Const" + input: 
"bert/encoder/layer_8/output/layer_normalization_18/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3" + input: "bert/encoder/layer_8/output/layer_normalization_18/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/mul_2" + op: "Mul" + input: "bert/encoder/layer_8/output/layer_normalization_18/Reshape_1" + input: "bert/encoder/layer_8/output/layer_normalization_18/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/add" + op: "AddV2" + input: "bert/encoder/layer_8/output/layer_normalization_18/mul_2" + input: "bert/encoder/layer_8/output/layer_normalization_18/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_9/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_9/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + input: "bert/encoder/layer_9/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + input: "bert/encoder/layer_9/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_8/output/layer_normalization_18/add" + input: "bert/encoder/layer_9/attention/self/query/MatMul/ReadVariableOp" + attr { + 
key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_9/attention/self/query/MatMul" + input: "bert/encoder/layer_9/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: 
"bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + input: "bert/encoder/layer_9/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + input: "bert/encoder/layer_9/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_8/output/layer_normalization_18/add" + input: "bert/encoder/layer_9/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_9/attention/self/key/MatMul" + input: "bert/encoder/layer_9/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_9/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_9/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + input: "bert/encoder/layer_9/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + input: "bert/encoder/layer_9/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_8/output/layer_normalization_18/add" + input: "bert/encoder/layer_9/attention/self/value/MatMul/ReadVariableOp" + attr { + 
key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_9/attention/self/value/MatMul" + input: "bert/encoder/layer_9/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_9/attention/self/query/BiasAdd" + input: "bert/encoder/layer_9/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_9/attention/self/Reshape" + input: "bert/encoder/layer_9/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_9/attention/self/key/BiasAdd" + input: 
"bert/encoder/layer_9/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_9/attention/self/Reshape_1" + input: "bert/encoder/layer_9/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_9/attention/self/transpose" + input: "bert/encoder/layer_9/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_9/attention/self/MatMul" + input: "bert/encoder/layer_9/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_9/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_9/attention/self/sub/x" + input: "bert/encoder/layer_9/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_9/attention/self/sub" + input: "bert/encoder/layer_9/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_9/attention/self/Mul" + input: "bert/encoder/layer_9/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_9/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_9/attention/self/Softmax" + input: "bert/encoder/layer_9/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_9/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_9/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_9/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_9/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_9/attention/self/dropout/Mul" + input: "bert/encoder/layer_9/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_9/attention/self/value/BiasAdd" + input: "bert/encoder/layer_9/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_9/attention/self/Reshape_2" + input: 
"bert/encoder/layer_9/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_9/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_9/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_9/attention/self/MatMul_1" + input: "bert/encoder/layer_9/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_9/attention/self/transpose_3" + input: "bert/encoder/layer_9/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + 
} + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + input: "bert/encoder/layer_9/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + input: "bert/encoder/layer_9/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_9/attention/self/Reshape_3" + input: "bert/encoder/layer_9/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value 
{ + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_9/attention/output/dense/MatMul" + input: "bert/encoder/layer_9/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_9/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_9/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_9/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_9/attention/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_9/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_9/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_9/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_9/attention/output/dropout/Mul" + input: "bert/encoder/layer_9/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_9/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_8/output/layer_normalization_18/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + 
type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Shape" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice/stack" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice/stack_1" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul" + op: "Mul" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul/x" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: 
"bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Shape" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice_1/stack" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice_1/stack_1" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul_1" + op: "Mul" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul_1/x" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape/shape/0" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul_1" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape" + op: "Reshape" + input: "bert/encoder/layer_9/attention/output/add" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 
+ } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/ones/packed" + op: "Pack" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/ones" + op: "Fill" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/ones/packed" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/zeros" + op: "Fill" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/zeros/packed" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape" + 
input: "bert/encoder/layer_9/attention/output/layer_normalization_19/ones" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/zeros" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Const" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + op: "Mul" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_1" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/add" + op: "AddV2" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + 
value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + input: "bert/encoder/layer_9/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_9/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + input: "bert/encoder/layer_9/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/add" + input: "bert/encoder/layer_9/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_9/intermediate/dense/MatMul" + input: "bert/encoder/layer_9/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_9/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_9/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: 
"bert/encoder/layer_9/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_9/intermediate/dense/mul/x" + input: "bert/encoder/layer_9/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_9/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_9/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_9/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_9/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_9/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_9/intermediate/dense/add_1/x" + input: "bert/encoder/layer_9/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_9/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_9/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_9/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_9/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + 
} + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel" + input: "bert/encoder/layer_9/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias" + input: "bert/encoder/layer_9/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel" + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_9/intermediate/dense/mul_3" + input: "bert/encoder/layer_9/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_9/output/dense/MatMul" + input: "bert/encoder/layer_9/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_9/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_9/output/dense/BiasAdd" + input: "bert/encoder/layer_9/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_9/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: 
"bert/encoder/layer_9/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_9/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_9/output/dropout/GreaterEqual" + input: "bert/encoder/layer_9/output/dropout/Mul" + input: "bert/encoder/layer_9/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/add" + op: "AddV2" + input: "bert/encoder/layer_9/output/dropout/SelectV2" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_9/output/layer_normalization_20/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/layer_normalization_20/beta" + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_9/output/layer_normalization_20/Shape" + input: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice/stack" + input: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice/stack_1" + input: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/mul" + op: "Mul" + input: "bert/encoder/layer_9/output/layer_normalization_20/mul/x" + input: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_9/output/layer_normalization_20/Shape" + 
input: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice_1/stack" + input: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice_1/stack_1" + input: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/mul_1" + op: "Mul" + input: "bert/encoder/layer_9/output/layer_normalization_20/mul_1/x" + input: "bert/encoder/layer_9/output/layer_normalization_20/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_9/output/layer_normalization_20/Reshape/shape/0" + input: "bert/encoder/layer_9/output/layer_normalization_20/mul" + input: "bert/encoder/layer_9/output/layer_normalization_20/mul_1" + input: "bert/encoder/layer_9/output/layer_normalization_20/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/Reshape" + op: "Reshape" + input: "bert/encoder/layer_9/output/add" + input: "bert/encoder/layer_9/output/layer_normalization_20/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/ones/packed" + op: "Pack" + input: "bert/encoder/layer_9/output/layer_normalization_20/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/ones" + op: "Fill" + input: "bert/encoder/layer_9/output/layer_normalization_20/ones/packed" + input: "bert/encoder/layer_9/output/layer_normalization_20/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_9/output/layer_normalization_20/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/zeros" + op: "Fill" + input: "bert/encoder/layer_9/output/layer_normalization_20/zeros/packed" + input: "bert/encoder/layer_9/output/layer_normalization_20/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_9/output/layer_normalization_20/Reshape" + input: "bert/encoder/layer_9/output/layer_normalization_20/ones" + input: "bert/encoder/layer_9/output/layer_normalization_20/zeros" + input: "bert/encoder/layer_9/output/layer_normalization_20/Const" + input: "bert/encoder/layer_9/output/layer_normalization_20/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 
768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3" + input: "bert/encoder/layer_9/output/layer_normalization_20/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/mul_2" + op: "Mul" + input: "bert/encoder/layer_9/output/layer_normalization_20/Reshape_1" + input: "bert/encoder/layer_9/output/layer_normalization_20/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/add" + op: "AddV2" + input: "bert/encoder/layer_9/output/layer_normalization_20/mul_2" + input: "bert/encoder/layer_9/output/layer_normalization_20/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 
0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + input: 
"bert/encoder/layer_10/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + input: "bert/encoder/layer_10/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_9/output/layer_normalization_20/add" + input: "bert/encoder/layer_10/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + 
} + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_10/attention/self/query/MatMul" + input: "bert/encoder/layer_10/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" 
+ value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + input: "bert/encoder/layer_10/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/key/bias" + } + } +} +node { + name: 
"bert/encoder/layer_10/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + input: "bert/encoder/layer_10/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_9/output/layer_normalization_20/add" + input: "bert/encoder/layer_10/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_10/attention/self/key/MatMul" + input: "bert/encoder/layer_10/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + 
input: "bert/encoder/layer_10/attention/self/value/kernel" + input: "bert/encoder/layer_10/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + input: "bert/encoder/layer_10/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_9/output/layer_normalization_20/add" + input: "bert/encoder/layer_10/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: 
false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_10/attention/self/value/MatMul" + input: "bert/encoder/layer_10/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_10/attention/self/query/BiasAdd" + input: "bert/encoder/layer_10/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_10/attention/self/Reshape" + input: "bert/encoder/layer_10/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_10/attention/self/key/BiasAdd" + input: "bert/encoder/layer_10/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_10/attention/self/Reshape_1" + input: "bert/encoder/layer_10/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_10/attention/self/transpose" + input: "bert/encoder/layer_10/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_10/attention/self/MatMul" + input: "bert/encoder/layer_10/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_10/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/sub/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: 
"bert/encoder/layer_10/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_10/attention/self/sub/x" + input: "bert/encoder/layer_10/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_10/attention/self/sub" + input: "bert/encoder/layer_10/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_10/attention/self/Mul" + input: "bert/encoder/layer_10/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_10/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_10/attention/self/Softmax" + input: "bert/encoder/layer_10/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_10/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_10/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_10/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_10/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_10/attention/self/dropout/Mul" + input: "bert/encoder/layer_10/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_10/attention/self/value/BiasAdd" + input: "bert/encoder/layer_10/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_10/attention/self/Reshape_2" + input: "bert/encoder/layer_10/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_10/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_10/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_10/attention/self/MatMul_1" + input: "bert/encoder/layer_10/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_10/attention/self/transpose_3" + input: "bert/encoder/layer_10/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape 
{ + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_10/attention/output/dense/kernel" + input: "bert/encoder/layer_10/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + input: "bert/encoder/layer_10/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_10/attention/self/Reshape_3" + input: "bert/encoder/layer_10/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_10/attention/output/dense/MatMul" + input: "bert/encoder/layer_10/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_10/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_10/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_10/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_10/attention/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_10/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { 
+ } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_10/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_10/attention/output/dropout/Mul" + input: "bert/encoder/layer_10/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_10/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_9/output/layer_normalization_20/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_10/attention/output/layer_normalization_21/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { 
+ tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Shape" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice/stack" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice/stack_1" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul" + op: "Mul" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul/x" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice_1" + op: 
"StridedSlice" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Shape" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice_1/stack" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice_1/stack_1" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul_1" + op: "Mul" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul_1/x" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape/shape/0" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul_1" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape" + op: "Reshape" + input: "bert/encoder/layer_10/attention/output/add" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } 
+} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/ones/packed" + op: "Pack" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/ones" + op: "Fill" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/ones/packed" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/zeros" + op: "Fill" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/zeros/packed" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape" + input: 
"bert/encoder/layer_10/attention/output/layer_normalization_21/ones" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/zeros" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Const" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + op: "Mul" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_1" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/add" + op: "AddV2" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + 
attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + input: "bert/encoder/layer_10/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + input: "bert/encoder/layer_10/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/add" + input: "bert/encoder/layer_10/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_10/intermediate/dense/MatMul" + input: "bert/encoder/layer_10/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_10/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_10/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 
0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_10/intermediate/dense/mul/x" + input: "bert/encoder/layer_10/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_10/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_10/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_10/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_10/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_10/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_10/intermediate/dense/add_1/x" + input: "bert/encoder/layer_10/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_10/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_10/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_10/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_10/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + 
list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel" + input: "bert/encoder/layer_10/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias" + input: "bert/encoder/layer_10/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_10/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_10/intermediate/dense/mul_3" + input: "bert/encoder/layer_10/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_10/output/dense/MatMul" + input: "bert/encoder/layer_10/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_10/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_10/output/dense/BiasAdd" + input: "bert/encoder/layer_10/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_10/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} 
+node { + name: "bert/encoder/layer_10/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_10/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_10/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_10/output/dropout/GreaterEqual" + input: "bert/encoder/layer_10/output/dropout/Mul" + input: "bert/encoder/layer_10/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/add" + op: "AddV2" + input: "bert/encoder/layer_10/output/dropout/SelectV2" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_10/output/layer_normalization_22/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/layer_normalization_22/beta" + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice/stack_1" + 
op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_10/output/layer_normalization_22/Shape" + input: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice/stack" + input: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice/stack_1" + input: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/mul" + op: "Mul" + input: "bert/encoder/layer_10/output/layer_normalization_22/mul/x" + input: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: 
"bert/encoder/layer_10/output/layer_normalization_22/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_10/output/layer_normalization_22/Shape" + input: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice_1/stack" + input: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice_1/stack_1" + input: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/mul_1" + op: "Mul" + input: "bert/encoder/layer_10/output/layer_normalization_22/mul_1/x" + input: "bert/encoder/layer_10/output/layer_normalization_22/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_10/output/layer_normalization_22/Reshape/shape/0" + input: "bert/encoder/layer_10/output/layer_normalization_22/mul" + input: "bert/encoder/layer_10/output/layer_normalization_22/mul_1" + input: "bert/encoder/layer_10/output/layer_normalization_22/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/Reshape" + op: "Reshape" + input: "bert/encoder/layer_10/output/add" + input: "bert/encoder/layer_10/output/layer_normalization_22/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/ones/packed" + op: "Pack" + 
input: "bert/encoder/layer_10/output/layer_normalization_22/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/ones" + op: "Fill" + input: "bert/encoder/layer_10/output/layer_normalization_22/ones/packed" + input: "bert/encoder/layer_10/output/layer_normalization_22/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_10/output/layer_normalization_22/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/zeros" + op: "Fill" + input: "bert/encoder/layer_10/output/layer_normalization_22/zeros/packed" + input: "bert/encoder/layer_10/output/layer_normalization_22/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_10/output/layer_normalization_22/Reshape" + input: "bert/encoder/layer_10/output/layer_normalization_22/ones" + input: "bert/encoder/layer_10/output/layer_normalization_22/zeros" + input: "bert/encoder/layer_10/output/layer_normalization_22/Const" + input: "bert/encoder/layer_10/output/layer_normalization_22/Const_1" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3" + input: "bert/encoder/layer_10/output/layer_normalization_22/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/mul_2" + op: "Mul" + input: "bert/encoder/layer_10/output/layer_normalization_22/Reshape_1" + input: "bert/encoder/layer_10/output/layer_normalization_22/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/add" + op: "AddV2" + input: "bert/encoder/layer_10/output/layer_normalization_22/mul_2" + input: "bert/encoder/layer_10/output/layer_normalization_22/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/query/kernel" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + input: "bert/encoder/layer_11/attention/self/query/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/query/bias" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + input: "bert/encoder/layer_11/attention/self/query/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/MatMul" + op: "MatMul" + input: "bert/encoder/layer_10/output/layer_normalization_22/add" + input: "bert/encoder/layer_11/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_11/attention/self/query/MatMul" + input: "bert/encoder/layer_11/attention/self/query/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/TruncatedNormal" + input: 
"bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/key/kernel" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + input: "bert/encoder/layer_11/attention/self/key/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { 
+ key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/key/bias" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + input: "bert/encoder/layer_11/attention/self/key/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/MatMul" + op: "MatMul" + input: "bert/encoder/layer_10/output/layer_normalization_22/add" + input: "bert/encoder/layer_11/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_11/attention/self/key/MatMul" + input: "bert/encoder/layer_11/attention/self/key/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + 
value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/value/kernel" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/IsInitialized/VarIsInitializedOp" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + input: "bert/encoder/layer_11/attention/self/value/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/value/bias" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + input: "bert/encoder/layer_11/attention/self/value/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/MatMul" + op: "MatMul" + input: "bert/encoder/layer_10/output/layer_normalization_22/add" + input: 
"bert/encoder/layer_11/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_11/attention/self/value/MatMul" + input: "bert/encoder/layer_11/attention/self/value/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Reshape" + op: "Reshape" + input: "bert/encoder/layer_11/attention/self/query/BiasAdd" + input: "bert/encoder/layer_11/attention/self/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/transpose" + op: "Transpose" + input: "bert/encoder/layer_11/attention/self/Reshape" + input: "bert/encoder/layer_11/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Reshape_1" 
+ op: "Reshape" + input: "bert/encoder/layer_11/attention/self/key/BiasAdd" + input: "bert/encoder/layer_11/attention/self/Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/transpose_1/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/transpose_1" + op: "Transpose" + input: "bert/encoder/layer_11/attention/self/Reshape_1" + input: "bert/encoder/layer_11/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/MatMul" + op: "BatchMatMulV2" + input: "bert/encoder/layer_11/attention/self/transpose" + input: "bert/encoder/layer_11/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Mul/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.125 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Mul" + op: "Mul" + input: "bert/encoder/layer_11/attention/self/MatMul" + input: "bert/encoder/layer_11/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/ExpandDims/dim" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/ExpandDims" + op: "ExpandDims" + input: "bert/encoder/mul" + input: "bert/encoder/layer_11/attention/self/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/sub/x" + op: 
"Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/sub" + op: "Sub" + input: "bert/encoder/layer_11/attention/self/sub/x" + input: "bert/encoder/layer_11/attention/self/ExpandDims" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/mul_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: -10000.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/mul_1" + op: "Mul" + input: "bert/encoder/layer_11/attention/self/sub" + input: "bert/encoder/layer_11/attention/self/mul_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/add" + op: "AddV2" + input: "bert/encoder/layer_11/attention/self/Mul" + input: "bert/encoder/layer_11/attention/self/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Softmax" + op: "Softmax" + input: "bert/encoder/layer_11/attention/self/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_11/attention/self/Softmax" + input: "bert/encoder/layer_11/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: 
"bert/encoder/layer_11/attention/self/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_11/attention/self/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_11/attention/self/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_11/attention/self/dropout/GreaterEqual" + input: "bert/encoder/layer_11/attention/self/dropout/Mul" + input: "bert/encoder/layer_11/attention/self/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_11/attention/self/value/BiasAdd" + input: "bert/encoder/layer_11/attention/self/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/transpose_2/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: 
"bert/encoder/layer_11/attention/self/transpose_2" + op: "Transpose" + input: "bert/encoder/layer_11/attention/self/Reshape_2" + input: "bert/encoder/layer_11/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_11/attention/self/dropout/SelectV2" + input: "bert/encoder/layer_11/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/transpose_3/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\002\000\000\000\001\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/transpose_3" + op: "Transpose" + input: "bert/encoder/layer_11/attention/self/MatMul_1" + input: "bert/encoder/layer_11/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_11/attention/self/transpose_3" + input: "bert/encoder/layer_11/attention/self/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + 
value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/dense/kernel" + } + } +} +node { + name: 
"bert/encoder/layer_11/attention/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + input: "bert/encoder/layer_11/attention/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + input: "bert/encoder/layer_11/attention/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_11/attention/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_11/attention/self/Reshape_3" + input: "bert/encoder/layer_11/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_11/attention/output/dense/MatMul" + input: "bert/encoder/layer_11/attention/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_11/attention/output/dense/BiasAdd" + input: "bert/encoder/layer_11/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_11/attention/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_11/attention/output/dropout/random_uniform/RandomUniform" + 
input: "bert/encoder/layer_11/attention/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_11/attention/output/dropout/GreaterEqual" + input: "bert/encoder/layer_11/attention/output/dropout/Mul" + input: "bert/encoder/layer_11/attention/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/add" + op: "AddV2" + input: "bert/encoder/layer_11/attention/output/dropout/SelectV2" + input: "bert/encoder/layer_10/output/layer_normalization_22/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + 
tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Shape" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice/stack" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice/stack_1" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul" + op: "Mul" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul/x" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice_1/stack_2" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Shape" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice_1/stack" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice_1/stack_1" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul_1" + op: "Mul" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul_1/x" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape/shape/0" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul_1" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape" + op: "Reshape" + input: 
"bert/encoder/layer_11/attention/output/add" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/ones/packed" + op: "Pack" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/ones" + op: "Fill" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/ones/packed" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/zeros" + op: "Fill" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/zeros/packed" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/ones" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/zeros" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Const" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + op: "Mul" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_1" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/add" + op: "AddV2" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + 
} + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel" + op: "VarHandleOp" + attr 
{ + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/intermediate/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + input: "bert/encoder/layer_11/intermediate/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/intermediate/dense/bias/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/intermediate/dense/bias/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + 
} + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/intermediate/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + input: "bert/encoder/layer_11/intermediate/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/add" + input: "bert/encoder/layer_11/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_11/intermediate/dense/MatMul" + input: "bert/encoder/layer_11/intermediate/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/Pow/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/Pow" + op: "Pow" + input: "bert/encoder/layer_11/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_11/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 
3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.044714998453855515 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/mul" + op: "Mul" + input: "bert/encoder/layer_11/intermediate/dense/mul/x" + input: "bert/encoder/layer_11/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/add" + op: "AddV2" + input: "bert/encoder/layer_11/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_11/intermediate/dense/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.7978845834732056 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/mul_1" + op: "Mul" + input: "bert/encoder/layer_11/intermediate/dense/mul_1/x" + input: "bert/encoder/layer_11/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/Tanh" + op: "Tanh" + input: "bert/encoder/layer_11/intermediate/dense/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/add_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/add_1" + op: "AddV2" + input: "bert/encoder/layer_11/intermediate/dense/add_1/x" + input: "bert/encoder/layer_11/intermediate/dense/Tanh" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.5 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/mul_2" + op: "Mul" + input: "bert/encoder/layer_11/intermediate/dense/mul_2/x" + input: "bert/encoder/layer_11/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/mul_3" + op: "Mul" + input: "bert/encoder/layer_11/intermediate/dense/BiasAdd" + input: "bert/encoder/layer_11/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/mul" + op: "Mul" + input: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/dense/kernel" + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel" + input: "bert/encoder/layer_11/output/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/dense/bias" + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/bias" + input: "bert/encoder/layer_11/output/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_11/output/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/MatMul" + op: "MatMul" + input: "bert/encoder/layer_11/intermediate/dense/mul_3" + input: "bert/encoder/layer_11/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/BiasAdd" + op: "BiasAdd" + input: "bert/encoder/layer_11/output/dense/MatMul" + input: "bert/encoder/layer_11/output/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/encoder/layer_11/output/dropout/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.1111111640930176 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dropout/Mul" + op: "Mul" + input: "bert/encoder/layer_11/output/dense/BiasAdd" + input: "bert/encoder/layer_11/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dropout/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dropout/random_uniform/RandomUniform" + op: "RandomUniform" + input: "bert/encoder/layer_11/output/dropout/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: 
"bert/encoder/layer_11/output/dropout/GreaterEqual/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dropout/GreaterEqual" + op: "GreaterEqual" + input: "bert/encoder/layer_11/output/dropout/random_uniform/RandomUniform" + input: "bert/encoder/layer_11/output/dropout/GreaterEqual/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dropout/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dropout/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_11/output/dropout/GreaterEqual" + input: "bert/encoder/layer_11/output/dropout/Mul" + input: "bert/encoder/layer_11/output/dropout/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/add" + op: "AddV2" + input: "bert/encoder/layer_11/output/dropout/SelectV2" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/Initializer/ones" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/gamma" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_11/output/layer_normalization_24/gamma" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/Initializer/ones" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/beta" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/layer_normalization_24/beta" + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/Shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice/stack" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice" + op: "StridedSlice" + input: "bert/encoder/layer_11/output/layer_normalization_24/Shape" + input: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice/stack" + input: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice/stack_1" + input: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/mul" + op: "Mul" + input: "bert/encoder/layer_11/output/layer_normalization_24/mul/x" + input: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice_1/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice_1/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 2 + } + } + } +} +node { + name: 
"bert/encoder/layer_11/output/layer_normalization_24/strided_slice_1/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice_1" + op: "StridedSlice" + input: "bert/encoder/layer_11/output/layer_normalization_24/Shape" + input: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice_1/stack" + input: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice_1/stack_1" + input: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice_1/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/mul_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/mul_1" + op: "Mul" + input: "bert/encoder/layer_11/output/layer_normalization_24/mul_1/x" + input: "bert/encoder/layer_11/output/layer_normalization_24/strided_slice_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/Reshape/shape/0" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/Reshape/shape/3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/Reshape/shape" + op: "Pack" + input: "bert/encoder/layer_11/output/layer_normalization_24/Reshape/shape/0" + input: "bert/encoder/layer_11/output/layer_normalization_24/mul" + input: "bert/encoder/layer_11/output/layer_normalization_24/mul_1" + input: "bert/encoder/layer_11/output/layer_normalization_24/Reshape/shape/3" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/Reshape" + op: "Reshape" + input: "bert/encoder/layer_11/output/add" + input: 
"bert/encoder/layer_11/output/layer_normalization_24/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/ones/packed" + op: "Pack" + input: "bert/encoder/layer_11/output/layer_normalization_24/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/ones/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/ones" + op: "Fill" + input: "bert/encoder/layer_11/output/layer_normalization_24/ones/packed" + input: "bert/encoder/layer_11/output/layer_normalization_24/ones/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/zeros/packed" + op: "Pack" + input: "bert/encoder/layer_11/output/layer_normalization_24/mul" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/zeros/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/zeros" + op: "Fill" + input: "bert/encoder/layer_11/output/layer_normalization_24/zeros/packed" + input: "bert/encoder/layer_11/output/layer_normalization_24/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: 
"bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3" + op: "FusedBatchNormV3" + input: "bert/encoder/layer_11/output/layer_normalization_24/Reshape" + input: "bert/encoder/layer_11/output/layer_normalization_24/ones" + input: "bert/encoder/layer_11/output/layer_normalization_24/zeros" + input: "bert/encoder/layer_11/output/layer_normalization_24/Const" + input: "bert/encoder/layer_11/output/layer_normalization_24/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "exponential_avg_factor" + value { + f: 1.0 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/Reshape_1" + op: "Reshape" + input: "bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3" + input: "bert/encoder/layer_11/output/layer_normalization_24/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/mul_2" + op: "Mul" + input: "bert/encoder/layer_11/output/layer_normalization_24/Reshape_1" + input: "bert/encoder/layer_11/output/layer_normalization_24/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/add/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/add" + op: "AddV2" + input: "bert/encoder/layer_11/output/layer_normalization_24/mul_2" + input: "bert/encoder/layer_11/output/layer_normalization_24/add/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_2/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: 
"\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_2" + op: "Reshape" + input: "bert/encoder/layer_0/output/layer_normalization_2/add" + input: "bert/encoder/Reshape_2/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_3/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_3" + op: "Reshape" + input: "bert/encoder/layer_1/output/layer_normalization_4/add" + input: "bert/encoder/Reshape_3/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_4/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_4" + op: "Reshape" + input: "bert/encoder/layer_2/output/layer_normalization_6/add" + input: "bert/encoder/Reshape_4/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_5/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_5" + op: "Reshape" + input: "bert/encoder/layer_3/output/layer_normalization_8/add" + input: "bert/encoder/Reshape_5/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_6/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_6" + op: "Reshape" + input: "bert/encoder/layer_4/output/layer_normalization_10/add" + input: 
"bert/encoder/Reshape_6/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_7/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_7" + op: "Reshape" + input: "bert/encoder/layer_5/output/layer_normalization_12/add" + input: "bert/encoder/Reshape_7/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_8/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_8" + op: "Reshape" + input: "bert/encoder/layer_6/output/layer_normalization_14/add" + input: "bert/encoder/Reshape_8/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_9/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_9" + op: "Reshape" + input: "bert/encoder/layer_7/output/layer_normalization_16/add" + input: "bert/encoder/Reshape_9/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_10/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_10" + op: "Reshape" + input: "bert/encoder/layer_8/output/layer_normalization_18/add" + input: "bert/encoder/Reshape_10/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list 
{ + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_11/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_11" + op: "Reshape" + input: "bert/encoder/layer_9/output/layer_normalization_20/add" + input: "bert/encoder/Reshape_11/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_12/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_12" + op: "Reshape" + input: "bert/encoder/layer_10/output/layer_normalization_22/add" + input: "bert/encoder/Reshape_12/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/encoder/Reshape_13/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/Reshape_13" + op: "Reshape" + input: "bert/encoder/layer_11/output/layer_normalization_24/add" + input: "bert/encoder/Reshape_13/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/pooler/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\000\000\000\000\000\000\000\000\000\000\000\000" + } + } + } +} +node { + name: "bert/pooler/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\000\000\000\000" + } + } + } +} +node { + name: "bert/pooler/strided_slice/stack_2" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\001\000\000\000\001\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "bert/pooler/strided_slice" + op: "StridedSlice" + input: "bert/encoder/Reshape_13" + input: "bert/pooler/strided_slice/stack" + input: "bert/pooler/strided_slice/stack_1" + input: "bert/pooler/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "begin_mask" + value { + i: 5 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 5 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 0 + } + } +} +node { + name: "bert/pooler/Squeeze" + op: "Squeeze" + input: "bert/pooler/strided_slice" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "squeeze_dims" + value { + list { + i: 1 + } + } + } +} +node { + name: "bert/pooler/dense/kernel/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/pooler/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/pooler/dense/kernel/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/pooler/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/pooler/dense/kernel/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/pooler/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "bert/pooler/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "bert/pooler/dense/kernel/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/pooler/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "bert/pooler/dense/kernel/Initializer/truncated_normal/mul" 
+ op: "Mul" + input: "bert/pooler/dense/kernel/Initializer/truncated_normal/TruncatedNormal" + input: "bert/pooler/dense/kernel/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/pooler/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/pooler/dense/kernel/Initializer/truncated_normal" + op: "AddV2" + input: "bert/pooler/dense/kernel/Initializer/truncated_normal/mul" + input: "bert/pooler/dense/kernel/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/pooler/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "bert/pooler/dense/kernel" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/pooler/dense/kernel" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/pooler/dense/kernel" + } + } +} +node { + name: "bert/pooler/dense/kernel/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/pooler/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/pooler/dense/kernel/Assign" + op: "AssignVariableOp" + input: "bert/pooler/dense/kernel" + input: "bert/pooler/dense/kernel/Initializer/truncated_normal" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/pooler/dense/kernel/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/pooler/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/pooler/dense/bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/pooler/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/pooler/dense/bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/pooler/dense/bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/pooler/dense/bias" + } + } +} +node { + name: "bert/pooler/dense/bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/pooler/dense/bias" + attr { + 
key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/pooler/dense/bias/Assign" + op: "AssignVariableOp" + input: "bert/pooler/dense/bias" + input: "bert/pooler/dense/bias/Initializer/zeros" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/pooler/dense/bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/pooler/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/pooler/dense/MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/pooler/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/pooler/dense/MatMul" + op: "MatMul" + input: "bert/pooler/Squeeze" + input: "bert/pooler/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "bert/pooler/dense/BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/pooler/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/pooler/dense/BiasAdd" + op: "BiasAdd" + input: "bert/pooler/dense/MatMul" + input: "bert/pooler/dense/BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "bert/pooler/dense/Tanh" + op: "Tanh" + input: "bert/pooler/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "cls/squad/output_weights/Initializer/truncated_normal/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\002\000\000\000\000\003\000\000" + } + } + } +} +node { + name: "cls/squad/output_weights/Initializer/truncated_normal/mean" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "cls/squad/output_weights/Initializer/truncated_normal/stddev" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.019999999552965164 + } + } + } +} +node { + name: "cls/squad/output_weights/Initializer/truncated_normal/TruncatedNormal" + op: "TruncatedNormal" + input: "cls/squad/output_weights/Initializer/truncated_normal/shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "seed" + value { + i: 0 + } + } + attr { + key: "seed2" + value { + i: 0 + } + } +} +node { + name: "cls/squad/output_weights/Initializer/truncated_normal/mul" + op: "Mul" + input: "cls/squad/output_weights/Initializer/truncated_normal/TruncatedNormal" + input: "cls/squad/output_weights/Initializer/truncated_normal/stddev" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "cls/squad/output_weights/Initializer/truncated_normal" + op: "AddV2" + input: "cls/squad/output_weights/Initializer/truncated_normal/mul" + input: "cls/squad/output_weights/Initializer/truncated_normal/mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "cls/squad/output_weights" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "cls/squad/output_weights" + } + } +} +node { + name: "cls/squad/output_weights/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "cls/squad/output_weights" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "cls/squad/output_weights/Assign" + op: "AssignVariableOp" + input: "cls/squad/output_weights" + input: "cls/squad/output_weights/Initializer/truncated_normal" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "cls/squad/output_weights/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_weights" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "cls/squad/output_bias/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 
2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 2 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "cls/squad/output_bias" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_bias" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 2 + } + } + } + } + attr { + key: "shared_name" + value { + s: "cls/squad/output_bias" + } + } +} +node { + name: "cls/squad/output_bias/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "cls/squad/output_bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "cls/squad/output_bias/Assign" + op: "AssignVariableOp" + input: "cls/squad/output_bias" + input: "cls/squad/output_bias/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "cls/squad/output_bias/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Reshape/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "Reshape" + op: "Reshape" + input: "bert/encoder/Reshape_13" + input: "Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "MatMul/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_weights" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "MatMul" + op: "MatMul" + input: "Reshape" + input: "MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 2 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "BiasAdd/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "BiasAdd" + op: "BiasAdd" + input: "MatMul" + input: "BiasAdd/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 2 + } 
+ } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "Reshape_1/shape" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\002\000\000\000" + } + } + } +} +node { + name: "Reshape_1" + op: "Reshape" + input: "BiasAdd" + input: "Reshape_1/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 2 + } + } + } + } + } +} +node { + name: "transpose/perm" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\002\000\000\000\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "transpose" + op: "Transpose" + input: "Reshape_1" + input: "transpose/perm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "unstack" + op: "Unpack" + input: "transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } + attr { + key: "num" + value { + i: 2 + } + } +} +node { + name: "checkpoint_initializer/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/embeddings/position_embeddings" + } + } + } +} +node { + name: "checkpoint_initializer/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer" + op: "RestoreV2" + input: "checkpoint_initializer/prefix" + input: "checkpoint_initializer/tensor_names" + input: "checkpoint_initializer/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity" + op: "Identity" + input: "checkpoint_initializer" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings" + input: "Identity" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_1/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_1/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/embeddings/token_type_embeddings" + } + } + } +} +node { + name: "checkpoint_initializer_1/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_1" + op: "RestoreV2" + input: "checkpoint_initializer_1/prefix" + input: "checkpoint_initializer_1/tensor_names" + input: "checkpoint_initializer_1/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_1" + op: "Identity" + input: "checkpoint_initializer_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_1" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings" + input: "Identity_1" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_2/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_2/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/embeddings/word_embeddings" + } + } + } +} +node { + name: "checkpoint_initializer_2/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_2" + op: "RestoreV2" + input: "checkpoint_initializer_2/prefix" + input: "checkpoint_initializer_2/tensor_names" + input: "checkpoint_initializer_2/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_2" + op: "Identity" + input: "checkpoint_initializer_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_2" + op: "AssignVariableOp" + input: "bert/embeddings/word_embeddings" + input: "Identity_2" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_3/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_3/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_3/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_3" + op: "RestoreV2" + input: "checkpoint_initializer_3/prefix" + input: "checkpoint_initializer_3/tensor_names" + input: "checkpoint_initializer_3/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_3" + op: "Identity" + input: "checkpoint_initializer_3" + attr { + key: "T" + value { + type: DT_FLOAT + 
} + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_3" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + input: "Identity_3" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_4/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_4/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_4/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_4" + op: "RestoreV2" + input: "checkpoint_initializer_4/prefix" + input: "checkpoint_initializer_4/tensor_names" + input: "checkpoint_initializer_4/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_4" + op: "Identity" + input: "checkpoint_initializer_4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_4" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + input: "Identity_4" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_5/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_5/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + 
tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_5/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_5" + op: "RestoreV2" + input: "checkpoint_initializer_5/prefix" + input: "checkpoint_initializer_5/tensor_names" + input: "checkpoint_initializer_5/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_5" + op: "Identity" + input: "checkpoint_initializer_5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_5" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + input: "Identity_5" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_6/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_6/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_6/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_6" + op: "RestoreV2" + input: "checkpoint_initializer_6/prefix" + input: "checkpoint_initializer_6/tensor_names" + input: "checkpoint_initializer_6/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_6" + op: "Identity" + input: "checkpoint_initializer_6" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_6" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_0/attention/self/key/kernel" + input: "Identity_6" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_7/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_7/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_7/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_7" + op: "RestoreV2" + input: "checkpoint_initializer_7/prefix" + input: "checkpoint_initializer_7/tensor_names" + input: "checkpoint_initializer_7/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_7" + op: "Identity" + input: "checkpoint_initializer_7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_7" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + input: "Identity_7" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_8/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_8/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_8/shape_and_slices" + op: "Const" + device: 
"/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_8" + op: "RestoreV2" + input: "checkpoint_initializer_8/prefix" + input: "checkpoint_initializer_8/tensor_names" + input: "checkpoint_initializer_8/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_8" + op: "Identity" + input: "checkpoint_initializer_8" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_8" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + input: "Identity_8" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_9/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_9/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_9/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_9" + op: "RestoreV2" + input: "checkpoint_initializer_9/prefix" + input: "checkpoint_initializer_9/tensor_names" + input: "checkpoint_initializer_9/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_9" + op: "Identity" + input: "checkpoint_initializer_9" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_9" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + input: "Identity_9" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + 
} + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_10/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_10/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_10/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_10" + op: "RestoreV2" + input: "checkpoint_initializer_10/prefix" + input: "checkpoint_initializer_10/tensor_names" + input: "checkpoint_initializer_10/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_10" + op: "Identity" + input: "checkpoint_initializer_10" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_10" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + input: "Identity_10" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_11/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_11/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_11/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr 
{ + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_11" + op: "RestoreV2" + input: "checkpoint_initializer_11/prefix" + input: "checkpoint_initializer_11/tensor_names" + input: "checkpoint_initializer_11/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_11" + op: "Identity" + input: "checkpoint_initializer_11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_11" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + input: "Identity_11" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_12/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_12/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_12/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_12" + op: "RestoreV2" + input: "checkpoint_initializer_12/prefix" + input: "checkpoint_initializer_12/tensor_names" + input: "checkpoint_initializer_12/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_12" + op: "Identity" + input: "checkpoint_initializer_12" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_12" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + input: "Identity_12" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_13/prefix" + op: "Const" + device: "/device:CPU:0" + attr 
{ + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_13/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_13/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_13" + op: "RestoreV2" + input: "checkpoint_initializer_13/prefix" + input: "checkpoint_initializer_13/tensor_names" + input: "checkpoint_initializer_13/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_13" + op: "Identity" + input: "checkpoint_initializer_13" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_13" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias" + input: "Identity_13" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_14/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_14/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_0/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_14/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_14" + op: "RestoreV2" + input: 
"checkpoint_initializer_14/prefix" + input: "checkpoint_initializer_14/tensor_names" + input: "checkpoint_initializer_14/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_14" + op: "Identity" + input: "checkpoint_initializer_14" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_14" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel" + input: "Identity_14" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_15/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_15/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_15/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_15" + op: "RestoreV2" + input: "checkpoint_initializer_15/prefix" + input: "checkpoint_initializer_15/tensor_names" + input: "checkpoint_initializer_15/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_15" + op: "Identity" + input: "checkpoint_initializer_15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_15" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + input: "Identity_15" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_16/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + 
tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_16/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_16/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_16" + op: "RestoreV2" + input: "checkpoint_initializer_16/prefix" + input: "checkpoint_initializer_16/tensor_names" + input: "checkpoint_initializer_16/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_16" + op: "Identity" + input: "checkpoint_initializer_16" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_16" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + input: "Identity_16" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_17/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_17/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_17/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_17" + op: "RestoreV2" + input: "checkpoint_initializer_17/prefix" + input: "checkpoint_initializer_17/tensor_names" + input: "checkpoint_initializer_17/shape_and_slices" + device: 
"/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_17" + op: "Identity" + input: "checkpoint_initializer_17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_17" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + input: "Identity_17" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_18/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_18/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_18/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_18" + op: "RestoreV2" + input: "checkpoint_initializer_18/prefix" + input: "checkpoint_initializer_18/tensor_names" + input: "checkpoint_initializer_18/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_18" + op: "Identity" + input: "checkpoint_initializer_18" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_18" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + input: "Identity_18" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_19/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } 
+} +node { + name: "checkpoint_initializer_19/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_19/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_19" + op: "RestoreV2" + input: "checkpoint_initializer_19/prefix" + input: "checkpoint_initializer_19/tensor_names" + input: "checkpoint_initializer_19/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_19" + op: "Identity" + input: "checkpoint_initializer_19" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_19" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + input: "Identity_19" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_20/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_20/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_20/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_20" + op: "RestoreV2" + input: "checkpoint_initializer_20/prefix" + input: "checkpoint_initializer_20/tensor_names" + input: "checkpoint_initializer_20/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + 
type: DT_FLOAT + } + } + } +} +node { + name: "Identity_20" + op: "Identity" + input: "checkpoint_initializer_20" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_20" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + input: "Identity_20" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_21/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_21/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_21/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_21" + op: "RestoreV2" + input: "checkpoint_initializer_21/prefix" + input: "checkpoint_initializer_21/tensor_names" + input: "checkpoint_initializer_21/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_21" + op: "Identity" + input: "checkpoint_initializer_21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_21" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + input: "Identity_21" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_22/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_22/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + 
} + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_22/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_22" + op: "RestoreV2" + input: "checkpoint_initializer_22/prefix" + input: "checkpoint_initializer_22/tensor_names" + input: "checkpoint_initializer_22/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_22" + op: "Identity" + input: "checkpoint_initializer_22" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_22" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + input: "Identity_22" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_23/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_23/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_23/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_23" + op: "RestoreV2" + input: "checkpoint_initializer_23/prefix" + input: "checkpoint_initializer_23/tensor_names" + input: "checkpoint_initializer_23/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_23" + op: "Identity" + input: "checkpoint_initializer_23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_23" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + input: "Identity_23" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_24/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_24/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_24/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_24" + op: "RestoreV2" + input: "checkpoint_initializer_24/prefix" + input: "checkpoint_initializer_24/tensor_names" + input: "checkpoint_initializer_24/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_24" + op: "Identity" + input: "checkpoint_initializer_24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_24" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + input: "Identity_24" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_25/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_25/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { 
+ size: 1 + } + } + string_val: "bert/encoder/layer_1/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_25/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_25" + op: "RestoreV2" + input: "checkpoint_initializer_25/prefix" + input: "checkpoint_initializer_25/tensor_names" + input: "checkpoint_initializer_25/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_25" + op: "Identity" + input: "checkpoint_initializer_25" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_25" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias" + input: "Identity_25" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_26/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_26/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_1/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_26/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_26" + op: "RestoreV2" + input: "checkpoint_initializer_26/prefix" + input: "checkpoint_initializer_26/tensor_names" + input: "checkpoint_initializer_26/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_26" + op: "Identity" + input: "checkpoint_initializer_26" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_26" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_1/output/dense/kernel" + input: "Identity_26" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_27/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_27/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_27/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_27" + op: "RestoreV2" + input: "checkpoint_initializer_27/prefix" + input: "checkpoint_initializer_27/tensor_names" + input: "checkpoint_initializer_27/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_27" + op: "Identity" + input: "checkpoint_initializer_27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_27" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + input: "Identity_27" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_28/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_28/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_28/shape_and_slices" + op: "Const" + 
device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_28" + op: "RestoreV2" + input: "checkpoint_initializer_28/prefix" + input: "checkpoint_initializer_28/tensor_names" + input: "checkpoint_initializer_28/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_28" + op: "Identity" + input: "checkpoint_initializer_28" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_28" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + input: "Identity_28" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_29/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_29/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_29/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_29" + op: "RestoreV2" + input: "checkpoint_initializer_29/prefix" + input: "checkpoint_initializer_29/tensor_names" + input: "checkpoint_initializer_29/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_29" + op: "Identity" + input: "checkpoint_initializer_29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_29" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + input: "Identity_29" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_30/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_30/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_30/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_30" + op: "RestoreV2" + input: "checkpoint_initializer_30/prefix" + input: "checkpoint_initializer_30/tensor_names" + input: "checkpoint_initializer_30/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_30" + op: "Identity" + input: "checkpoint_initializer_30" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_30" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + input: "Identity_30" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_31/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_31/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_31/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { 
+ type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_31" + op: "RestoreV2" + input: "checkpoint_initializer_31/prefix" + input: "checkpoint_initializer_31/tensor_names" + input: "checkpoint_initializer_31/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_31" + op: "Identity" + input: "checkpoint_initializer_31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_31" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + input: "Identity_31" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_32/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_32/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_32/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_32" + op: "RestoreV2" + input: "checkpoint_initializer_32/prefix" + input: "checkpoint_initializer_32/tensor_names" + input: "checkpoint_initializer_32/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_32" + op: "Identity" + input: "checkpoint_initializer_32" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_32" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + input: "Identity_32" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_33/prefix" + op: 
"Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_33/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_33/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_33" + op: "RestoreV2" + input: "checkpoint_initializer_33/prefix" + input: "checkpoint_initializer_33/tensor_names" + input: "checkpoint_initializer_33/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_33" + op: "Identity" + input: "checkpoint_initializer_33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_33" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + input: "Identity_33" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_34/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_34/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_34/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + 
name: "checkpoint_initializer_34" + op: "RestoreV2" + input: "checkpoint_initializer_34/prefix" + input: "checkpoint_initializer_34/tensor_names" + input: "checkpoint_initializer_34/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_34" + op: "Identity" + input: "checkpoint_initializer_34" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_34" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + input: "Identity_34" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_35/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_35/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_35/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_35" + op: "RestoreV2" + input: "checkpoint_initializer_35/prefix" + input: "checkpoint_initializer_35/tensor_names" + input: "checkpoint_initializer_35/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_35" + op: "Identity" + input: "checkpoint_initializer_35" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_35" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + input: "Identity_35" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_36/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { 
+ key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_36/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_36/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_36" + op: "RestoreV2" + input: "checkpoint_initializer_36/prefix" + input: "checkpoint_initializer_36/tensor_names" + input: "checkpoint_initializer_36/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_36" + op: "Identity" + input: "checkpoint_initializer_36" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_36" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + input: "Identity_36" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_37/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_37/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_37/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_37" + op: "RestoreV2" + input: "checkpoint_initializer_37/prefix" + input: "checkpoint_initializer_37/tensor_names" + input: 
"checkpoint_initializer_37/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_37" + op: "Identity" + input: "checkpoint_initializer_37" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_37" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias" + input: "Identity_37" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_38/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_38/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_10/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_38/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_38" + op: "RestoreV2" + input: "checkpoint_initializer_38/prefix" + input: "checkpoint_initializer_38/tensor_names" + input: "checkpoint_initializer_38/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_38" + op: "Identity" + input: "checkpoint_initializer_38" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_38" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel" + input: "Identity_38" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_39/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: 
"/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_39/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_39/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_39" + op: "RestoreV2" + input: "checkpoint_initializer_39/prefix" + input: "checkpoint_initializer_39/tensor_names" + input: "checkpoint_initializer_39/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_39" + op: "Identity" + input: "checkpoint_initializer_39" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_39" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + input: "Identity_39" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_40/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_40/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_40/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_40" + op: "RestoreV2" + input: "checkpoint_initializer_40/prefix" + input: "checkpoint_initializer_40/tensor_names" + input: "checkpoint_initializer_40/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value 
{ + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_40" + op: "Identity" + input: "checkpoint_initializer_40" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_40" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + input: "Identity_40" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_41/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_41/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_41/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_41" + op: "RestoreV2" + input: "checkpoint_initializer_41/prefix" + input: "checkpoint_initializer_41/tensor_names" + input: "checkpoint_initializer_41/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_41" + op: "Identity" + input: "checkpoint_initializer_41" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_41" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + input: "Identity_41" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_42/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: 
"checkpoint_initializer_42/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_42/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_42" + op: "RestoreV2" + input: "checkpoint_initializer_42/prefix" + input: "checkpoint_initializer_42/tensor_names" + input: "checkpoint_initializer_42/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_42" + op: "Identity" + input: "checkpoint_initializer_42" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_42" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + input: "Identity_42" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_43/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_43/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_43/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_43" + op: "RestoreV2" + input: "checkpoint_initializer_43/prefix" + input: "checkpoint_initializer_43/tensor_names" + input: "checkpoint_initializer_43/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } 
+ } + } +} +node { + name: "Identity_43" + op: "Identity" + input: "checkpoint_initializer_43" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_43" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + input: "Identity_43" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_44/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_44/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_44/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_44" + op: "RestoreV2" + input: "checkpoint_initializer_44/prefix" + input: "checkpoint_initializer_44/tensor_names" + input: "checkpoint_initializer_44/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_44" + op: "Identity" + input: "checkpoint_initializer_44" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_44" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + input: "Identity_44" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_45/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_45/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_45/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_45" + op: "RestoreV2" + input: "checkpoint_initializer_45/prefix" + input: "checkpoint_initializer_45/tensor_names" + input: "checkpoint_initializer_45/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_45" + op: "Identity" + input: "checkpoint_initializer_45" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_45" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + input: "Identity_45" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_46/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_46/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_46/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_46" + op: "RestoreV2" + input: "checkpoint_initializer_46/prefix" + input: "checkpoint_initializer_46/tensor_names" + input: "checkpoint_initializer_46/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_46" + op: "Identity" + input: "checkpoint_initializer_46" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_46" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + input: "Identity_46" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_47/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_47/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_47/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_47" + op: "RestoreV2" + input: "checkpoint_initializer_47/prefix" + input: "checkpoint_initializer_47/tensor_names" + input: "checkpoint_initializer_47/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_47" + op: "Identity" + input: "checkpoint_initializer_47" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_47" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + input: "Identity_47" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_48/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_48/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 
1 + } + } + string_val: "bert/encoder/layer_11/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_48/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_48" + op: "RestoreV2" + input: "checkpoint_initializer_48/prefix" + input: "checkpoint_initializer_48/tensor_names" + input: "checkpoint_initializer_48/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_48" + op: "Identity" + input: "checkpoint_initializer_48" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_48" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + input: "Identity_48" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_49/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_49/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_49/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_49" + op: "RestoreV2" + input: "checkpoint_initializer_49/prefix" + input: "checkpoint_initializer_49/tensor_names" + input: "checkpoint_initializer_49/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_49" + op: "Identity" + input: "checkpoint_initializer_49" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_49" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_11/output/dense/bias" + input: "Identity_49" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_50/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_50/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_11/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_50/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_50" + op: "RestoreV2" + input: "checkpoint_initializer_50/prefix" + input: "checkpoint_initializer_50/tensor_names" + input: "checkpoint_initializer_50/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_50" + op: "Identity" + input: "checkpoint_initializer_50" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_50" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel" + input: "Identity_50" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_51/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_51/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_51/shape_and_slices" + op: "Const" 
+ device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_51" + op: "RestoreV2" + input: "checkpoint_initializer_51/prefix" + input: "checkpoint_initializer_51/tensor_names" + input: "checkpoint_initializer_51/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_51" + op: "Identity" + input: "checkpoint_initializer_51" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_51" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + input: "Identity_51" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_52/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_52/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_52/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_52" + op: "RestoreV2" + input: "checkpoint_initializer_52/prefix" + input: "checkpoint_initializer_52/tensor_names" + input: "checkpoint_initializer_52/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_52" + op: "Identity" + input: "checkpoint_initializer_52" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_52" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + input: "Identity_52" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_53/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_53/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_53/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_53" + op: "RestoreV2" + input: "checkpoint_initializer_53/prefix" + input: "checkpoint_initializer_53/tensor_names" + input: "checkpoint_initializer_53/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_53" + op: "Identity" + input: "checkpoint_initializer_53" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_53" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + input: "Identity_53" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_54/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_54/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_54/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } 
+ } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_54" + op: "RestoreV2" + input: "checkpoint_initializer_54/prefix" + input: "checkpoint_initializer_54/tensor_names" + input: "checkpoint_initializer_54/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_54" + op: "Identity" + input: "checkpoint_initializer_54" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_54" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + input: "Identity_54" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_55/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_55/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_55/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_55" + op: "RestoreV2" + input: "checkpoint_initializer_55/prefix" + input: "checkpoint_initializer_55/tensor_names" + input: "checkpoint_initializer_55/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_55" + op: "Identity" + input: "checkpoint_initializer_55" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_55" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + input: "Identity_55" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_56/prefix" + op: "Const" + device: 
"/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_56/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_56/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_56" + op: "RestoreV2" + input: "checkpoint_initializer_56/prefix" + input: "checkpoint_initializer_56/tensor_names" + input: "checkpoint_initializer_56/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_56" + op: "Identity" + input: "checkpoint_initializer_56" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_56" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + input: "Identity_56" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_57/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_57/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_57/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { 
+ name: "checkpoint_initializer_57" + op: "RestoreV2" + input: "checkpoint_initializer_57/prefix" + input: "checkpoint_initializer_57/tensor_names" + input: "checkpoint_initializer_57/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_57" + op: "Identity" + input: "checkpoint_initializer_57" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_57" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + input: "Identity_57" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_58/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_58/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_58/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_58" + op: "RestoreV2" + input: "checkpoint_initializer_58/prefix" + input: "checkpoint_initializer_58/tensor_names" + input: "checkpoint_initializer_58/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_58" + op: "Identity" + input: "checkpoint_initializer_58" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_58" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + input: "Identity_58" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_59/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + 
attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_59/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_59/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_59" + op: "RestoreV2" + input: "checkpoint_initializer_59/prefix" + input: "checkpoint_initializer_59/tensor_names" + input: "checkpoint_initializer_59/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_59" + op: "Identity" + input: "checkpoint_initializer_59" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_59" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + input: "Identity_59" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_60/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_60/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_60/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_60" + op: "RestoreV2" + input: "checkpoint_initializer_60/prefix" + input: "checkpoint_initializer_60/tensor_names" + input: 
"checkpoint_initializer_60/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_60" + op: "Identity" + input: "checkpoint_initializer_60" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_60" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + input: "Identity_60" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_61/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_61/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_61/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_61" + op: "RestoreV2" + input: "checkpoint_initializer_61/prefix" + input: "checkpoint_initializer_61/tensor_names" + input: "checkpoint_initializer_61/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_61" + op: "Identity" + input: "checkpoint_initializer_61" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_61" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias" + input: "Identity_61" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_62/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: 
"/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_62/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_2/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_62/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_62" + op: "RestoreV2" + input: "checkpoint_initializer_62/prefix" + input: "checkpoint_initializer_62/tensor_names" + input: "checkpoint_initializer_62/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_62" + op: "Identity" + input: "checkpoint_initializer_62" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_62" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel" + input: "Identity_62" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_63/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_63/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_63/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_63" + op: "RestoreV2" + input: "checkpoint_initializer_63/prefix" + input: "checkpoint_initializer_63/tensor_names" + input: "checkpoint_initializer_63/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_63" + op: "Identity" + input: "checkpoint_initializer_63" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_63" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + input: "Identity_63" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_64/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_64/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_64/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_64" + op: "RestoreV2" + input: "checkpoint_initializer_64/prefix" + input: "checkpoint_initializer_64/tensor_names" + input: "checkpoint_initializer_64/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_64" + op: "Identity" + input: "checkpoint_initializer_64" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_64" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + input: "Identity_64" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_65/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: 
"checkpoint_initializer_65/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_65/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_65" + op: "RestoreV2" + input: "checkpoint_initializer_65/prefix" + input: "checkpoint_initializer_65/tensor_names" + input: "checkpoint_initializer_65/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_65" + op: "Identity" + input: "checkpoint_initializer_65" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_65" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + input: "Identity_65" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_66/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_66/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_66/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_66" + op: "RestoreV2" + input: "checkpoint_initializer_66/prefix" + input: "checkpoint_initializer_66/tensor_names" + input: "checkpoint_initializer_66/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } 
+} +node { + name: "Identity_66" + op: "Identity" + input: "checkpoint_initializer_66" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_66" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + input: "Identity_66" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_67/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_67/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_67/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_67" + op: "RestoreV2" + input: "checkpoint_initializer_67/prefix" + input: "checkpoint_initializer_67/tensor_names" + input: "checkpoint_initializer_67/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_67" + op: "Identity" + input: "checkpoint_initializer_67" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_67" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + input: "Identity_67" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_68/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_68/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_68/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_68" + op: "RestoreV2" + input: "checkpoint_initializer_68/prefix" + input: "checkpoint_initializer_68/tensor_names" + input: "checkpoint_initializer_68/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_68" + op: "Identity" + input: "checkpoint_initializer_68" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_68" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + input: "Identity_68" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_69/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_69/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_69/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_69" + op: "RestoreV2" + input: "checkpoint_initializer_69/prefix" + input: "checkpoint_initializer_69/tensor_names" + input: "checkpoint_initializer_69/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_69" + op: "Identity" + input: "checkpoint_initializer_69" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value 
{ + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_69" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + input: "Identity_69" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_70/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_70/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_70/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_70" + op: "RestoreV2" + input: "checkpoint_initializer_70/prefix" + input: "checkpoint_initializer_70/tensor_names" + input: "checkpoint_initializer_70/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_70" + op: "Identity" + input: "checkpoint_initializer_70" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_70" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + input: "Identity_70" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_71/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_71/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + 
string_val: "bert/encoder/layer_3/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_71/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_71" + op: "RestoreV2" + input: "checkpoint_initializer_71/prefix" + input: "checkpoint_initializer_71/tensor_names" + input: "checkpoint_initializer_71/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_71" + op: "Identity" + input: "checkpoint_initializer_71" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_71" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + input: "Identity_71" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_72/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_72/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_72/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_72" + op: "RestoreV2" + input: "checkpoint_initializer_72/prefix" + input: "checkpoint_initializer_72/tensor_names" + input: "checkpoint_initializer_72/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_72" + op: "Identity" + input: "checkpoint_initializer_72" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_72" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_3/intermediate/dense/kernel" + input: "Identity_72" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_73/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_73/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_73/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_73" + op: "RestoreV2" + input: "checkpoint_initializer_73/prefix" + input: "checkpoint_initializer_73/tensor_names" + input: "checkpoint_initializer_73/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_73" + op: "Identity" + input: "checkpoint_initializer_73" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_73" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias" + input: "Identity_73" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_74/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_74/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_3/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_74/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr 
{ + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_74" + op: "RestoreV2" + input: "checkpoint_initializer_74/prefix" + input: "checkpoint_initializer_74/tensor_names" + input: "checkpoint_initializer_74/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_74" + op: "Identity" + input: "checkpoint_initializer_74" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_74" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel" + input: "Identity_74" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_75/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_75/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_75/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_75" + op: "RestoreV2" + input: "checkpoint_initializer_75/prefix" + input: "checkpoint_initializer_75/tensor_names" + input: "checkpoint_initializer_75/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_75" + op: "Identity" + input: "checkpoint_initializer_75" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_75" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + input: "Identity_75" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + 
attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_76/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_76/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_76/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_76" + op: "RestoreV2" + input: "checkpoint_initializer_76/prefix" + input: "checkpoint_initializer_76/tensor_names" + input: "checkpoint_initializer_76/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_76" + op: "Identity" + input: "checkpoint_initializer_76" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_76" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + input: "Identity_76" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_77/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_77/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_77/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr 
{ + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_77" + op: "RestoreV2" + input: "checkpoint_initializer_77/prefix" + input: "checkpoint_initializer_77/tensor_names" + input: "checkpoint_initializer_77/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_77" + op: "Identity" + input: "checkpoint_initializer_77" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_77" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + input: "Identity_77" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_78/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_78/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_78/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_78" + op: "RestoreV2" + input: "checkpoint_initializer_78/prefix" + input: "checkpoint_initializer_78/tensor_names" + input: "checkpoint_initializer_78/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_78" + op: "Identity" + input: "checkpoint_initializer_78" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_78" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + input: "Identity_78" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_79/prefix" + op: "Const" + device: "/device:CPU:0" + attr { 
+ key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_79/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_79/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_79" + op: "RestoreV2" + input: "checkpoint_initializer_79/prefix" + input: "checkpoint_initializer_79/tensor_names" + input: "checkpoint_initializer_79/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_79" + op: "Identity" + input: "checkpoint_initializer_79" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_79" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + input: "Identity_79" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_80/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_80/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_80/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_80" + op: 
"RestoreV2" + input: "checkpoint_initializer_80/prefix" + input: "checkpoint_initializer_80/tensor_names" + input: "checkpoint_initializer_80/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_80" + op: "Identity" + input: "checkpoint_initializer_80" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_80" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + input: "Identity_80" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_81/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_81/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_81/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_81" + op: "RestoreV2" + input: "checkpoint_initializer_81/prefix" + input: "checkpoint_initializer_81/tensor_names" + input: "checkpoint_initializer_81/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_81" + op: "Identity" + input: "checkpoint_initializer_81" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_81" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + input: "Identity_81" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_82/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + 
dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_82/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_82/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_82" + op: "RestoreV2" + input: "checkpoint_initializer_82/prefix" + input: "checkpoint_initializer_82/tensor_names" + input: "checkpoint_initializer_82/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_82" + op: "Identity" + input: "checkpoint_initializer_82" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_82" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + input: "Identity_82" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_83/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_83/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_83/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_83" + op: "RestoreV2" + input: "checkpoint_initializer_83/prefix" + input: "checkpoint_initializer_83/tensor_names" + input: "checkpoint_initializer_83/shape_and_slices" 
+ device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_83" + op: "Identity" + input: "checkpoint_initializer_83" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_83" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + input: "Identity_83" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_84/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_84/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_84/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_84" + op: "RestoreV2" + input: "checkpoint_initializer_84/prefix" + input: "checkpoint_initializer_84/tensor_names" + input: "checkpoint_initializer_84/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_84" + op: "Identity" + input: "checkpoint_initializer_84" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_84" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + input: "Identity_84" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_85/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + 
} + } + } +} +node { + name: "checkpoint_initializer_85/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_85/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_85" + op: "RestoreV2" + input: "checkpoint_initializer_85/prefix" + input: "checkpoint_initializer_85/tensor_names" + input: "checkpoint_initializer_85/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_85" + op: "Identity" + input: "checkpoint_initializer_85" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_85" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias" + input: "Identity_85" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_86/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_86/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_4/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_86/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_86" + op: "RestoreV2" + input: "checkpoint_initializer_86/prefix" + input: "checkpoint_initializer_86/tensor_names" + input: "checkpoint_initializer_86/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT 
+ } + } + } +} +node { + name: "Identity_86" + op: "Identity" + input: "checkpoint_initializer_86" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_86" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel" + input: "Identity_86" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_87/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_87/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_87/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_87" + op: "RestoreV2" + input: "checkpoint_initializer_87/prefix" + input: "checkpoint_initializer_87/tensor_names" + input: "checkpoint_initializer_87/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_87" + op: "Identity" + input: "checkpoint_initializer_87" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_87" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + input: "Identity_87" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_88/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_88/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + 
attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_88/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_88" + op: "RestoreV2" + input: "checkpoint_initializer_88/prefix" + input: "checkpoint_initializer_88/tensor_names" + input: "checkpoint_initializer_88/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_88" + op: "Identity" + input: "checkpoint_initializer_88" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_88" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + input: "Identity_88" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_89/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_89/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_89/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_89" + op: "RestoreV2" + input: "checkpoint_initializer_89/prefix" + input: "checkpoint_initializer_89/tensor_names" + input: "checkpoint_initializer_89/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_89" + op: "Identity" + input: "checkpoint_initializer_89" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_89" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + input: "Identity_89" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_90/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_90/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_90/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_90" + op: "RestoreV2" + input: "checkpoint_initializer_90/prefix" + input: "checkpoint_initializer_90/tensor_names" + input: "checkpoint_initializer_90/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_90" + op: "Identity" + input: "checkpoint_initializer_90" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_90" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + input: "Identity_90" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_91/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_91/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + 
size: 1 + } + } + string_val: "bert/encoder/layer_5/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_91/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_91" + op: "RestoreV2" + input: "checkpoint_initializer_91/prefix" + input: "checkpoint_initializer_91/tensor_names" + input: "checkpoint_initializer_91/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_91" + op: "Identity" + input: "checkpoint_initializer_91" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_91" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + input: "Identity_91" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_92/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_92/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_92/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_92" + op: "RestoreV2" + input: "checkpoint_initializer_92/prefix" + input: "checkpoint_initializer_92/tensor_names" + input: "checkpoint_initializer_92/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_92" + op: "Identity" + input: "checkpoint_initializer_92" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_92" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_5/attention/self/query/kernel" + input: "Identity_92" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_93/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_93/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_93/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_93" + op: "RestoreV2" + input: "checkpoint_initializer_93/prefix" + input: "checkpoint_initializer_93/tensor_names" + input: "checkpoint_initializer_93/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_93" + op: "Identity" + input: "checkpoint_initializer_93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_93" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + input: "Identity_93" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_94/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_94/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_94/shape_and_slices" + op: "Const" + 
device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_94" + op: "RestoreV2" + input: "checkpoint_initializer_94/prefix" + input: "checkpoint_initializer_94/tensor_names" + input: "checkpoint_initializer_94/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_94" + op: "Identity" + input: "checkpoint_initializer_94" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_94" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + input: "Identity_94" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_95/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_95/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_95/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_95" + op: "RestoreV2" + input: "checkpoint_initializer_95/prefix" + input: "checkpoint_initializer_95/tensor_names" + input: "checkpoint_initializer_95/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_95" + op: "Identity" + input: "checkpoint_initializer_95" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_95" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + input: "Identity_95" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value 
{ + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_96/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_96/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_96/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_96" + op: "RestoreV2" + input: "checkpoint_initializer_96/prefix" + input: "checkpoint_initializer_96/tensor_names" + input: "checkpoint_initializer_96/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_96" + op: "Identity" + input: "checkpoint_initializer_96" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_96" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + input: "Identity_96" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_97/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_97/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_97/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_97" + op: "RestoreV2" + input: "checkpoint_initializer_97/prefix" + input: "checkpoint_initializer_97/tensor_names" + input: "checkpoint_initializer_97/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_97" + op: "Identity" + input: "checkpoint_initializer_97" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_97" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias" + input: "Identity_97" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_98/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_98/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_5/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_98/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_98" + op: "RestoreV2" + input: "checkpoint_initializer_98/prefix" + input: "checkpoint_initializer_98/tensor_names" + input: "checkpoint_initializer_98/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_98" + op: "Identity" + input: "checkpoint_initializer_98" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_98" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel" + input: "Identity_98" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_99/prefix" + op: "Const" + device: "/device:CPU:0" + 
attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_99/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_99/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_99" + op: "RestoreV2" + input: "checkpoint_initializer_99/prefix" + input: "checkpoint_initializer_99/tensor_names" + input: "checkpoint_initializer_99/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_99" + op: "Identity" + input: "checkpoint_initializer_99" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_99" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + input: "Identity_99" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_100/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_100/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_100/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: 
"checkpoint_initializer_100" + op: "RestoreV2" + input: "checkpoint_initializer_100/prefix" + input: "checkpoint_initializer_100/tensor_names" + input: "checkpoint_initializer_100/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_100" + op: "Identity" + input: "checkpoint_initializer_100" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_100" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + input: "Identity_100" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_101/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_101/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_101/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_101" + op: "RestoreV2" + input: "checkpoint_initializer_101/prefix" + input: "checkpoint_initializer_101/tensor_names" + input: "checkpoint_initializer_101/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_101" + op: "Identity" + input: "checkpoint_initializer_101" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_101" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + input: "Identity_101" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_102/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + 
} + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_102/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_102/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_102" + op: "RestoreV2" + input: "checkpoint_initializer_102/prefix" + input: "checkpoint_initializer_102/tensor_names" + input: "checkpoint_initializer_102/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_102" + op: "Identity" + input: "checkpoint_initializer_102" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_102" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + input: "Identity_102" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_103/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_103/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_103/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_103" + op: "RestoreV2" + input: "checkpoint_initializer_103/prefix" + input: 
"checkpoint_initializer_103/tensor_names" + input: "checkpoint_initializer_103/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_103" + op: "Identity" + input: "checkpoint_initializer_103" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_103" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + input: "Identity_103" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_104/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_104/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_104/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_104" + op: "RestoreV2" + input: "checkpoint_initializer_104/prefix" + input: "checkpoint_initializer_104/tensor_names" + input: "checkpoint_initializer_104/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_104" + op: "Identity" + input: "checkpoint_initializer_104" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_104" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + input: "Identity_104" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_105/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + 
string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_105/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_105/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_105" + op: "RestoreV2" + input: "checkpoint_initializer_105/prefix" + input: "checkpoint_initializer_105/tensor_names" + input: "checkpoint_initializer_105/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_105" + op: "Identity" + input: "checkpoint_initializer_105" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_105" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + input: "Identity_105" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_106/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_106/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_106/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_106" + op: "RestoreV2" + input: "checkpoint_initializer_106/prefix" + input: "checkpoint_initializer_106/tensor_names" + input: "checkpoint_initializer_106/shape_and_slices" + device: "/device:CPU:0" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_106" + op: "Identity" + input: "checkpoint_initializer_106" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_106" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + input: "Identity_106" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_107/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_107/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_107/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_107" + op: "RestoreV2" + input: "checkpoint_initializer_107/prefix" + input: "checkpoint_initializer_107/tensor_names" + input: "checkpoint_initializer_107/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_107" + op: "Identity" + input: "checkpoint_initializer_107" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_107" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + input: "Identity_107" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_108/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + 
name: "checkpoint_initializer_108/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_108/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_108" + op: "RestoreV2" + input: "checkpoint_initializer_108/prefix" + input: "checkpoint_initializer_108/tensor_names" + input: "checkpoint_initializer_108/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_108" + op: "Identity" + input: "checkpoint_initializer_108" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_108" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + input: "Identity_108" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_109/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_109/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_109/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_109" + op: "RestoreV2" + input: "checkpoint_initializer_109/prefix" + input: "checkpoint_initializer_109/tensor_names" + input: "checkpoint_initializer_109/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: 
DT_FLOAT + } + } + } +} +node { + name: "Identity_109" + op: "Identity" + input: "checkpoint_initializer_109" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_109" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias" + input: "Identity_109" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_110/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_110/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_6/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_110/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_110" + op: "RestoreV2" + input: "checkpoint_initializer_110/prefix" + input: "checkpoint_initializer_110/tensor_names" + input: "checkpoint_initializer_110/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_110" + op: "Identity" + input: "checkpoint_initializer_110" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_110" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel" + input: "Identity_110" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_111/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_111/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_111/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_111" + op: "RestoreV2" + input: "checkpoint_initializer_111/prefix" + input: "checkpoint_initializer_111/tensor_names" + input: "checkpoint_initializer_111/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_111" + op: "Identity" + input: "checkpoint_initializer_111" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_111" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + input: "Identity_111" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_112/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_112/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_112/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_112" + op: "RestoreV2" + input: "checkpoint_initializer_112/prefix" + input: "checkpoint_initializer_112/tensor_names" + input: "checkpoint_initializer_112/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_112" + op: "Identity" + input: "checkpoint_initializer_112" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_112" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + input: "Identity_112" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_113/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_113/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_113/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_113" + op: "RestoreV2" + input: "checkpoint_initializer_113/prefix" + input: "checkpoint_initializer_113/tensor_names" + input: "checkpoint_initializer_113/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_113" + op: "Identity" + input: "checkpoint_initializer_113" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_113" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + input: "Identity_113" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_114/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_114/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: 
DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_114/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_114" + op: "RestoreV2" + input: "checkpoint_initializer_114/prefix" + input: "checkpoint_initializer_114/tensor_names" + input: "checkpoint_initializer_114/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_114" + op: "Identity" + input: "checkpoint_initializer_114" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_114" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + input: "Identity_114" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_115/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_115/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_115/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_115" + op: "RestoreV2" + input: "checkpoint_initializer_115/prefix" + input: "checkpoint_initializer_115/tensor_names" + input: "checkpoint_initializer_115/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_115" + op: "Identity" + input: "checkpoint_initializer_115" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: 
"AssignVariableOp_115" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + input: "Identity_115" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_116/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_116/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_116/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_116" + op: "RestoreV2" + input: "checkpoint_initializer_116/prefix" + input: "checkpoint_initializer_116/tensor_names" + input: "checkpoint_initializer_116/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_116" + op: "Identity" + input: "checkpoint_initializer_116" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_116" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + input: "Identity_116" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_117/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_117/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/attention/self/value/bias" + 
} + } + } +} +node { + name: "checkpoint_initializer_117/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_117" + op: "RestoreV2" + input: "checkpoint_initializer_117/prefix" + input: "checkpoint_initializer_117/tensor_names" + input: "checkpoint_initializer_117/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_117" + op: "Identity" + input: "checkpoint_initializer_117" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_117" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + input: "Identity_117" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_118/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_118/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_118/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_118" + op: "RestoreV2" + input: "checkpoint_initializer_118/prefix" + input: "checkpoint_initializer_118/tensor_names" + input: "checkpoint_initializer_118/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_118" + op: "Identity" + input: "checkpoint_initializer_118" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_118" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + input: 
"Identity_118" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_119/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_119/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_119/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_119" + op: "RestoreV2" + input: "checkpoint_initializer_119/prefix" + input: "checkpoint_initializer_119/tensor_names" + input: "checkpoint_initializer_119/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_119" + op: "Identity" + input: "checkpoint_initializer_119" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_119" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + input: "Identity_119" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_120/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_120/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_120/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_120" + op: "RestoreV2" + input: "checkpoint_initializer_120/prefix" + input: "checkpoint_initializer_120/tensor_names" + input: "checkpoint_initializer_120/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_120" + op: "Identity" + input: "checkpoint_initializer_120" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_120" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + input: "Identity_120" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_121/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_121/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_121/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_121" + op: "RestoreV2" + input: "checkpoint_initializer_121/prefix" + input: "checkpoint_initializer_121/tensor_names" + input: "checkpoint_initializer_121/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_121" + op: "Identity" + input: "checkpoint_initializer_121" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_121" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias" + input: "Identity_121" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { 
+ key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_122/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_122/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_7/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_122/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_122" + op: "RestoreV2" + input: "checkpoint_initializer_122/prefix" + input: "checkpoint_initializer_122/tensor_names" + input: "checkpoint_initializer_122/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_122" + op: "Identity" + input: "checkpoint_initializer_122" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_122" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel" + input: "Identity_122" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_123/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_123/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_123/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + 
key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_123" + op: "RestoreV2" + input: "checkpoint_initializer_123/prefix" + input: "checkpoint_initializer_123/tensor_names" + input: "checkpoint_initializer_123/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_123" + op: "Identity" + input: "checkpoint_initializer_123" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_123" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + input: "Identity_123" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_124/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_124/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_124/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_124" + op: "RestoreV2" + input: "checkpoint_initializer_124/prefix" + input: "checkpoint_initializer_124/tensor_names" + input: "checkpoint_initializer_124/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_124" + op: "Identity" + input: "checkpoint_initializer_124" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_124" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + input: "Identity_124" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_125/prefix" + op: "Const" + 
device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_125/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_125/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_125" + op: "RestoreV2" + input: "checkpoint_initializer_125/prefix" + input: "checkpoint_initializer_125/tensor_names" + input: "checkpoint_initializer_125/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_125" + op: "Identity" + input: "checkpoint_initializer_125" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_125" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + input: "Identity_125" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_126/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_126/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_126/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: 
"checkpoint_initializer_126" + op: "RestoreV2" + input: "checkpoint_initializer_126/prefix" + input: "checkpoint_initializer_126/tensor_names" + input: "checkpoint_initializer_126/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_126" + op: "Identity" + input: "checkpoint_initializer_126" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_126" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + input: "Identity_126" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_127/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_127/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_127/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_127" + op: "RestoreV2" + input: "checkpoint_initializer_127/prefix" + input: "checkpoint_initializer_127/tensor_names" + input: "checkpoint_initializer_127/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_127" + op: "Identity" + input: "checkpoint_initializer_127" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_127" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + input: "Identity_127" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_128/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + 
} + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_128/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_128/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_128" + op: "RestoreV2" + input: "checkpoint_initializer_128/prefix" + input: "checkpoint_initializer_128/tensor_names" + input: "checkpoint_initializer_128/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_128" + op: "Identity" + input: "checkpoint_initializer_128" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_128" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + input: "Identity_128" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_129/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_129/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_129/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_129" + op: "RestoreV2" + input: "checkpoint_initializer_129/prefix" + input: 
"checkpoint_initializer_129/tensor_names" + input: "checkpoint_initializer_129/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_129" + op: "Identity" + input: "checkpoint_initializer_129" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_129" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + input: "Identity_129" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_130/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_130/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_130/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_130" + op: "RestoreV2" + input: "checkpoint_initializer_130/prefix" + input: "checkpoint_initializer_130/tensor_names" + input: "checkpoint_initializer_130/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_130" + op: "Identity" + input: "checkpoint_initializer_130" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_130" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + input: "Identity_130" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_131/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + 
string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_131/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/intermediate/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_131/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_131" + op: "RestoreV2" + input: "checkpoint_initializer_131/prefix" + input: "checkpoint_initializer_131/tensor_names" + input: "checkpoint_initializer_131/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_131" + op: "Identity" + input: "checkpoint_initializer_131" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_131" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + input: "Identity_131" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_132/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_132/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_132/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_132" + op: "RestoreV2" + input: "checkpoint_initializer_132/prefix" + input: "checkpoint_initializer_132/tensor_names" + input: "checkpoint_initializer_132/shape_and_slices" + device: "/device:CPU:0" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_132" + op: "Identity" + input: "checkpoint_initializer_132" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_132" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + input: "Identity_132" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_133/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_133/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_133/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_133" + op: "RestoreV2" + input: "checkpoint_initializer_133/prefix" + input: "checkpoint_initializer_133/tensor_names" + input: "checkpoint_initializer_133/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_133" + op: "Identity" + input: "checkpoint_initializer_133" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_133" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias" + input: "Identity_133" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_134/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: 
"checkpoint_initializer_134/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_8/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_134/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_134" + op: "RestoreV2" + input: "checkpoint_initializer_134/prefix" + input: "checkpoint_initializer_134/tensor_names" + input: "checkpoint_initializer_134/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_134" + op: "Identity" + input: "checkpoint_initializer_134" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_134" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel" + input: "Identity_134" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_135/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_135/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/attention/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_135/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_135" + op: "RestoreV2" + input: "checkpoint_initializer_135/prefix" + input: "checkpoint_initializer_135/tensor_names" + input: "checkpoint_initializer_135/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT 
+ } + } + } +} +node { + name: "Identity_135" + op: "Identity" + input: "checkpoint_initializer_135" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_135" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + input: "Identity_135" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_136/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_136/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/attention/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_136/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_136" + op: "RestoreV2" + input: "checkpoint_initializer_136/prefix" + input: "checkpoint_initializer_136/tensor_names" + input: "checkpoint_initializer_136/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_136" + op: "Identity" + input: "checkpoint_initializer_136" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_136" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + input: "Identity_136" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_137/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_137/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { 
+ shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/attention/self/key/bias" + } + } + } +} +node { + name: "checkpoint_initializer_137/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_137" + op: "RestoreV2" + input: "checkpoint_initializer_137/prefix" + input: "checkpoint_initializer_137/tensor_names" + input: "checkpoint_initializer_137/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_137" + op: "Identity" + input: "checkpoint_initializer_137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_137" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + input: "Identity_137" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_138/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_138/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/attention/self/key/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_138/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_138" + op: "RestoreV2" + input: "checkpoint_initializer_138/prefix" + input: "checkpoint_initializer_138/tensor_names" + input: "checkpoint_initializer_138/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_138" + op: "Identity" + input: "checkpoint_initializer_138" + attr { + key: "T" + value 
{ + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_138" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + input: "Identity_138" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_139/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_139/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/attention/self/query/bias" + } + } + } +} +node { + name: "checkpoint_initializer_139/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_139" + op: "RestoreV2" + input: "checkpoint_initializer_139/prefix" + input: "checkpoint_initializer_139/tensor_names" + input: "checkpoint_initializer_139/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_139" + op: "Identity" + input: "checkpoint_initializer_139" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_139" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + input: "Identity_139" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_140/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_140/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: 
DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/attention/self/query/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_140/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_140" + op: "RestoreV2" + input: "checkpoint_initializer_140/prefix" + input: "checkpoint_initializer_140/tensor_names" + input: "checkpoint_initializer_140/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_140" + op: "Identity" + input: "checkpoint_initializer_140" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_140" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + input: "Identity_140" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_141/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_141/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/attention/self/value/bias" + } + } + } +} +node { + name: "checkpoint_initializer_141/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_141" + op: "RestoreV2" + input: "checkpoint_initializer_141/prefix" + input: "checkpoint_initializer_141/tensor_names" + input: "checkpoint_initializer_141/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_141" + op: "Identity" + input: "checkpoint_initializer_141" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: 
"AssignVariableOp_141" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + input: "Identity_141" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_142/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_142/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/attention/self/value/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_142/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_142" + op: "RestoreV2" + input: "checkpoint_initializer_142/prefix" + input: "checkpoint_initializer_142/tensor_names" + input: "checkpoint_initializer_142/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_142" + op: "Identity" + input: "checkpoint_initializer_142" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_142" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + input: "Identity_142" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_143/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_143/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/intermediate/dense/bias" + } 
+ } + } +} +node { + name: "checkpoint_initializer_143/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_143" + op: "RestoreV2" + input: "checkpoint_initializer_143/prefix" + input: "checkpoint_initializer_143/tensor_names" + input: "checkpoint_initializer_143/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_143" + op: "Identity" + input: "checkpoint_initializer_143" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_143" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + input: "Identity_143" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_144/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_144/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/intermediate/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_144/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_144" + op: "RestoreV2" + input: "checkpoint_initializer_144/prefix" + input: "checkpoint_initializer_144/tensor_names" + input: "checkpoint_initializer_144/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_144" + op: "Identity" + input: "checkpoint_initializer_144" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_144" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + input: "Identity_144" + 
attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_145/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_145/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/output/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_145/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_145" + op: "RestoreV2" + input: "checkpoint_initializer_145/prefix" + input: "checkpoint_initializer_145/tensor_names" + input: "checkpoint_initializer_145/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_145" + op: "Identity" + input: "checkpoint_initializer_145" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_145" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias" + input: "Identity_145" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_146/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_146/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/encoder/layer_9/output/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_146/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_146" + op: "RestoreV2" + input: "checkpoint_initializer_146/prefix" + input: "checkpoint_initializer_146/tensor_names" + input: "checkpoint_initializer_146/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_146" + op: "Identity" + input: "checkpoint_initializer_146" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_146" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel" + input: "Identity_146" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "checkpoint_initializer_147/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_147/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/pooler/dense/bias" + } + } + } +} +node { + name: "checkpoint_initializer_147/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_147" + op: "RestoreV2" + input: "checkpoint_initializer_147/prefix" + input: "checkpoint_initializer_147/tensor_names" + input: "checkpoint_initializer_147/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_147" + op: "Identity" + input: "checkpoint_initializer_147" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_147" + op: "AssignVariableOp" + input: "bert/pooler/dense/bias" + input: "Identity_147" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"checkpoint_initializer_148/prefix" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "/home/demouser/Documents/Demos/energycalculatorsevaluation/data/bert/uncased_L-12_H-768_A-12/bert_model.ckpt" + } + } + } +} +node { + name: "checkpoint_initializer_148/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "bert/pooler/dense/kernel" + } + } + } +} +node { + name: "checkpoint_initializer_148/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "" + } + } + } +} +node { + name: "checkpoint_initializer_148" + op: "RestoreV2" + input: "checkpoint_initializer_148/prefix" + input: "checkpoint_initializer_148/tensor_names" + input: "checkpoint_initializer_148/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + } + } + } +} +node { + name: "Identity_148" + op: "Identity" + input: "checkpoint_initializer_148" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "AssignVariableOp_148" + op: "AssignVariableOp" + input: "bert/pooler/dense/kernel" + input: "Identity_148" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "one_hot/on_value" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "one_hot/off_value" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "one_hot/depth" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 384 + } + } + } +} +node { + name: "one_hot" + op: "OneHot" + input: "IteratorGetNext:4" + input: "one_hot/depth" + input: "one_hot/on_value" + input: "one_hot/off_value" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "TI" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } 
+ } + } + attr { + key: "axis" + value { + i: -1 + } + } +} +node { + name: "LogSoftmax" + op: "LogSoftmax" + input: "unstack" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "mul" + op: "Mul" + input: "one_hot" + input: "LogSoftmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "Sum/reduction_indices" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "Sum" + op: "Sum" + input: "mul" + input: "Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "Mean" + op: "Mean" + input: "Sum" + input: "Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "Neg" + op: "Neg" + input: "Mean" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "one_hot_1/on_value" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "one_hot_1/off_value" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "one_hot_1/depth" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 384 + } + } + } +} +node { + name: "one_hot_1" + op: "OneHot" + input: "IteratorGetNext" + input: "one_hot_1/depth" + input: "one_hot_1/on_value" + input: "one_hot_1/off_value" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "TI" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "axis" + value { + i: -1 + } + } +} +node { + name: "LogSoftmax_1" + op: "LogSoftmax" + input: "unstack:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "mul_1" + op: "Mul" + input: "one_hot_1" + input: "LogSoftmax_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "Sum_1/reduction_indices" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "Sum_1" + op: "Sum" + input: "mul_1" + input: "Sum_1/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "Const_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "Mean_1" + op: "Mean" + input: "Sum_1" + input: "Const_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "Neg_1" + op: "Neg" + input: "Mean_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "add" + op: "AddV2" + input: "Neg" + input: "Neg_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "truediv/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 2.0 + } + } + } +} +node { + name: "truediv" + op: "RealDiv" + input: "add" + input: "truediv/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "Const_3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 2.9999999242136255e-05 + } + } + } +} +node { + name: "PolynomialDecay/Cast/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "PolynomialDecay/Cast_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: 
"PolynomialDecay/Cast_2/ReadVariableOp" + op: "ReadVariableOp" + input: "global_step" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } +} +node { + name: "PolynomialDecay/Cast_2" + op: "Cast" + input: "PolynomialDecay/Cast_2/ReadVariableOp" + attr { + key: "DstT" + value { + type: DT_FLOAT + } + } + attr { + key: "SrcT" + value { + type: DT_INT64 + } + } + attr { + key: "Truncate" + value { + b: false + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "PolynomialDecay/Cast_3/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 9 + } + } + } +} +node { + name: "PolynomialDecay/Cast_3" + op: "Cast" + input: "PolynomialDecay/Cast_3/x" + attr { + key: "DstT" + value { + type: DT_FLOAT + } + } + attr { + key: "SrcT" + value { + type: DT_INT32 + } + } + attr { + key: "Truncate" + value { + b: false + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "PolynomialDecay/Minimum" + op: "Minimum" + input: "PolynomialDecay/Cast_2" + input: "PolynomialDecay/Cast_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "PolynomialDecay/truediv" + op: "RealDiv" + input: "PolynomialDecay/Minimum" + input: "PolynomialDecay/Cast_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "PolynomialDecay/sub" + op: "Sub" + input: "Const_3" + input: "PolynomialDecay/Cast/x" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "PolynomialDecay/sub_1/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "PolynomialDecay/sub_1" + op: "Sub" + input: "PolynomialDecay/sub_1/x" + input: "PolynomialDecay/truediv" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "PolynomialDecay/Pow" + op: "Pow" + input: "PolynomialDecay/sub_1" + input: "PolynomialDecay/Cast_1/x" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "PolynomialDecay/Mul" + op: "Mul" + input: "PolynomialDecay/sub" + input: "PolynomialDecay/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "PolynomialDecay" + op: "AddV2" + input: "PolynomialDecay/Mul" + input: "PolynomialDecay/Cast/x" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/grad_ys_0/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/grad_ys_0" + op: "Fill" + input: "gradients/Shape" + input: "gradients/grad_ys_0/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/truediv_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/truediv_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/truediv_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/truediv_grad/Shape" + input: "gradients/truediv_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/truediv_grad/RealDiv" + op: "RealDiv" + input: "gradients/grad_ys_0" + input: "truediv/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/truediv_grad/Sum" + op: "Sum" + input: "gradients/truediv_grad/RealDiv" + input: "gradients/truediv_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/truediv_grad/Reshape" + op: "Reshape" + input: "gradients/truediv_grad/Sum" + input: "gradients/truediv_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/truediv_grad/Neg" + op: "Neg" + input: "add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + 
list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/truediv_grad/RealDiv_1" + op: "RealDiv" + input: "gradients/truediv_grad/Neg" + input: "truediv/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/truediv_grad/RealDiv_2" + op: "RealDiv" + input: "gradients/truediv_grad/RealDiv_1" + input: "truediv/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/truediv_grad/mul" + op: "Mul" + input: "gradients/grad_ys_0" + input: "gradients/truediv_grad/RealDiv_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/truediv_grad/Sum_1" + op: "Sum" + input: "gradients/truediv_grad/mul" + input: "gradients/truediv_grad/BroadcastGradientArgs:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/truediv_grad/Reshape_1" + op: "Reshape" + input: "gradients/truediv_grad/Sum_1" + input: "gradients/truediv_grad/Shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@truediv" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/Neg_grad/Neg" + op: "Neg" + input: "gradients/truediv_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Neg" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/Neg_1_grad/Neg" + op: "Neg" + input: "gradients/truediv_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Neg_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/Mean_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Mean" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/Mean_grad/Reshape" + op: "Reshape" + input: "gradients/Neg_grad/Neg" + input: "gradients/Mean_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Mean" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } +} +node { + name: 
"gradients/Mean_grad/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Mean" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3 + } + } + } +} +node { + name: "gradients/Mean_grad/Tile" + op: "Tile" + input: "gradients/Mean_grad/Reshape" + input: "gradients/Mean_grad/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tmultiples" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Mean" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } +} +node { + name: "gradients/Mean_grad/Const_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Mean" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "gradients/Mean_grad/truediv" + op: "RealDiv" + input: "gradients/Mean_grad/Tile" + input: "gradients/Mean_grad/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Mean" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } +} +node { + name: "gradients/Mean_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Mean_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/Mean_1_grad/Reshape" + op: "Reshape" + input: "gradients/Neg_1_grad/Neg" + input: "gradients/Mean_1_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Mean_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/Mean_1_grad/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Mean_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3 + } + } + } +} +node { + name: "gradients/Mean_1_grad/Tile" + op: "Tile" + input: "gradients/Mean_1_grad/Reshape" + input: "gradients/Mean_1_grad/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tmultiples" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Mean_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } +} +node { + name: "gradients/Mean_1_grad/Const_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Mean_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 3.0 + } + } + } +} +node { + name: "gradients/Mean_1_grad/truediv" + op: "RealDiv" + input: "gradients/Mean_1_grad/Tile" + input: "gradients/Mean_1_grad/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Mean_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } +} +node { + name: "gradients/Sum_grad/Maximum/x" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Sum" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\003\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/Sum_grad/Maximum/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Sum" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "gradients/Sum_grad/Maximum" + op: "Maximum" + input: "gradients/Sum_grad/Maximum/x" + input: "gradients/Sum_grad/Maximum/y" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Sum" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "gradients/Sum_grad/floordiv/x" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Sum" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\003\000\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/Sum_grad/floordiv" + op: "FloorDiv" + input: "gradients/Sum_grad/floordiv/x" + input: "gradients/Sum_grad/Maximum" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Sum" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "gradients/Sum_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Sum" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\003\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/Sum_grad/Reshape" + op: "Reshape" + input: "gradients/Mean_grad/truediv" + input: "gradients/Sum_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Sum" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: 
"gradients/Sum_grad/Tile/multiples" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Sum" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\001\000\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/Sum_grad/Tile" + op: "Tile" + input: "gradients/Sum_grad/Reshape" + input: "gradients/Sum_grad/Tile/multiples" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tmultiples" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Sum" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/Sum_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Sum_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\003\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/Sum_1_grad/Reshape" + op: "Reshape" + input: "gradients/Mean_1_grad/truediv" + input: "gradients/Sum_1_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Sum_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/Sum_1_grad/Tile/multiples" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Sum_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\001\000\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/Sum_1_grad/Tile" + op: "Tile" + input: "gradients/Sum_1_grad/Reshape" + input: "gradients/Sum_1_grad/Tile/multiples" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tmultiples" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Sum_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/mul_grad/Mul" + op: "Mul" + input: "gradients/Sum_grad/Tile" + input: "LogSoftmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/mul_grad/Mul_1" + op: "Mul" + input: "gradients/Sum_grad/Tile" + input: "one_hot" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: 
"gradients/mul_1_grad/Mul" + op: "Mul" + input: "gradients/Sum_1_grad/Tile" + input: "LogSoftmax_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/mul_1_grad/Mul_1" + op: "Mul" + input: "gradients/Sum_1_grad/Tile" + input: "one_hot_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/LogSoftmax_grad/Exp" + op: "Exp" + input: "LogSoftmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/LogSoftmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/LogSoftmax_grad/Sum" + op: "Sum" + input: "gradients/mul_grad/Mul_1" + input: "gradients/LogSoftmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/LogSoftmax_grad/mul" + op: "Mul" + input: "gradients/LogSoftmax_grad/Sum" + input: "gradients/LogSoftmax_grad/Exp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/LogSoftmax_grad/sub" + op: "Sub" + input: "gradients/mul_grad/Mul_1" + input: "gradients/LogSoftmax_grad/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/LogSoftmax_1_grad/Exp" + op: "Exp" + input: "LogSoftmax_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/LogSoftmax_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 
+ } + } + } +} +node { + name: "gradients/LogSoftmax_1_grad/Sum" + op: "Sum" + input: "gradients/mul_1_grad/Mul_1" + input: "gradients/LogSoftmax_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/LogSoftmax_1_grad/mul" + op: "Mul" + input: "gradients/LogSoftmax_1_grad/Sum" + input: "gradients/LogSoftmax_1_grad/Exp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/LogSoftmax_1_grad/sub" + op: "Sub" + input: "gradients/mul_1_grad/Mul_1" + input: "gradients/LogSoftmax_1_grad/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@LogSoftmax_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/unstack_grad/stack" + op: "Pack" + input: "gradients/LogSoftmax_grad/sub" + input: "gradients/LogSoftmax_1_grad/sub" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@unstack" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 3 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "gradients/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } +} +node { + name: "gradients/transpose_grad/transpose" + op: "Transpose" + input: "gradients/unstack_grad/stack" + input: "gradients/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 2 + } + } + } + } + } +} +node { + name: "gradients/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\002\000\000\000" + } + } + } +} +node { + name: "gradients/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/transpose_grad/transpose" + input: "gradients/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Reshape_1" + } 
+ } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 2 + } + } + } + } + } +} +node { + name: "gradients/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/Reshape_1_grad/Reshape" + input: "MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/MatMul_grad/MatMul_1" + op: "MatMul" + input: "gradients/Reshape_1_grad/Reshape" + input: "Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/MatMul_grad/MatMul" + input: "gradients/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/Reshape_13_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/Reshape_13" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/Reshape_13_grad/Reshape" + op: "Reshape" + input: "gradients/Reshape_grad/Reshape" + input: "gradients/bert/encoder/Reshape_13_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/Reshape_13" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } 
+} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/BroadcastGradientArgs/s0" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/BroadcastGradientArgs/s1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/BroadcastGradientArgs/s0" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/BroadcastGradientArgs/s1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/Reshape_13_grad/Reshape" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} 
+node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/Sum" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/Reshape_13_grad/Reshape" + input: "bert/encoder/layer_11/output/layer_normalization_24/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_11/output/layer_normalization_24/Reshape_1" + input: "gradients/bert/encoder/Reshape_13_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + 
name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like" + op: "ZerosLike" + input: "bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_1" + op: "ZerosLike" + input: "bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_2" + op: "ZerosLike" + input: "bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_3" + op: "ZerosLike" + input: "bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_4" + op: "ZerosLike" + input: "bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_11/output/layer_normalization_24/Reshape" + input: "bert/encoder/layer_11/output/layer_normalization_24/ones" + input: "bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3:3" + input: "bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3:4" + input: "bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + 
} + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_11/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: 
DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_11/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/BroadcastGradientArgs/s0" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + 
} + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/BroadcastGradientArgs/s1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/BroadcastGradientArgs/s0" + input: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/BroadcastGradientArgs/s1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_11/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_11/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_11/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: 
"gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_11/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_11/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_11/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_11/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_11/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_11/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_11/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/BroadcastGradientArgs/s0" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/BroadcastGradientArgs/s0_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/BroadcastGradientArgs/s1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/BroadcastGradientArgs/s0_1" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/BroadcastGradientArgs/s1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_11/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_11/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_11/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_11/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_11/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_11/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_1" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_11/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: 
"gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_11/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_11/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul" + } + } + } + attr { 
+ key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/BroadcastGradientArgs/s0" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/BroadcastGradientArgs/s1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/BroadcastGradientArgs/s0" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/BroadcastGradientArgs/s1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_11/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_11/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_11/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + 
} + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_11/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: 
"gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_11/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: 
"gradients/bert/encoder/layer_11/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_11/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_11/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN" + op: "AddN" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_11/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN" + input: "bert/encoder/layer_11/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/add" + input: "gradients/AddN" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_1" + op: "AddN" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_11/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + 
dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_1" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/add_grad/Sum" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_1" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_1" + input: "gradients/AddN_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" 
+ value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_5" + op: "ZerosLike" + 
input: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_6" + op: "ZerosLike" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_7" + op: "ZerosLike" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_8" + op: "ZerosLike" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_9" + op: "ZerosLike" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/ones" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3:3" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3:4" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + 
attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_11/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_11/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: 
"BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_11/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_11/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} 
+node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_11/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/output/dense/MatMul_grad/MatMul_1" 
+ op: "MatMul" + input: "bert/encoder/layer_11/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_11/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_11/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_11/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_11/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } 
+ } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_11/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_11/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_11/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_11/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: 
DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_11/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_11/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } 
+} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_11/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_11/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_11/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/BroadcastGradientArgs/s0" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/BroadcastGradientArgs/s1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor 
{ + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/BroadcastGradientArgs/s0" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/BroadcastGradientArgs/s1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_11/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_11/attention/self/Softmax" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_11/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/mul" + input: 
"gradients/bert/encoder/layer_11/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/sub" + op: "Sub" + input: "gradients/bert/encoder/layer_11/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_11/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/add_grad/BroadcastGradientArgs/s0" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/add_grad/BroadcastGradientArgs/s1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/add_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: 
"gradients/bert/encoder/layer_11/attention/self/add_grad/BroadcastGradientArgs/s0" + input: "gradients/bert/encoder/layer_11/attention/self/add_grad/BroadcastGradientArgs/s1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_11/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/add_grad/Sum" + input: "gradients/bert/encoder/layer_11/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_11/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + 
value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_10/output/layer_normalization_22/add" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_11/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_11/attention/self/MatMul" + input: "gradients/bert/encoder/layer_11/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_11/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_11/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_11/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_11/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_11/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_11/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_11/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_11/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_11/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_11/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_11/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_11/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_11/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_10/output/layer_normalization_22/add" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_11/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_11/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_10/output/layer_normalization_22/add" + input: "gradients/bert/encoder/layer_11/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { 
+ shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_2" + op: "AddN" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_11/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_11/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_11/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_2" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/add_grad/Sum" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_2" + input: "bert/encoder/layer_10/output/layer_normalization_22/mul_2/ReadVariableOp" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_10/output/layer_normalization_22/Reshape_1" + input: "gradients/AddN_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { 
+ dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_10" + op: "ZerosLike" + input: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_11" + op: "ZerosLike" + input: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_12" + op: "ZerosLike" + input: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_13" + op: "ZerosLike" + input: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_14" + op: "ZerosLike" + input: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_10/output/layer_normalization_22/Reshape" + input: 
"bert/encoder/layer_10/output/layer_normalization_22/ones" + input: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3:3" + input: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3:4" + input: "bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_10/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { 
+ list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_10/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_10/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_10/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_10/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_10/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_10/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { 
+ key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_10/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_10/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_10/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_10/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_10/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_10/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { 
+ name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_10/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_10/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_10/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_10/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_10/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_1" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_10/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_10/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Mul" + input: 
"gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_10/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_10/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_10/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_10/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_10/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + 
} + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_10/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: 
"gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_10/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_3" + op: "AddN" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_10/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_3" + input: "bert/encoder/layer_10/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/add" + input: "gradients/AddN_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_4" + op: "AddN" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_10/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_4" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/add_grad/Sum" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_4" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_1" + input: "gradients/AddN_4" + attr { + key: "T" 
+ value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_15" + op: "ZerosLike" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_16" + op: "ZerosLike" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_17" + op: "ZerosLike" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_18" + op: "ZerosLike" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_19" + op: "ZerosLike" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/ones" + input: 
"bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3:3" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3:4" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_10/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_10/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_10/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_10/attention/output/dense/BiasAdd" + input: 
"gradients/bert/encoder/layer_10/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/BiasAdd" + 
} + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_10/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_10/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_10/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_10/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_10/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_10/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_10/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_10/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_10/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_10/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_10/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + 
} + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_10/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_10/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_10/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_10/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_10/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_10/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_10/attention/self/Softmax" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: 
"gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_10/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/sub" + op: "Sub" + input: 
"gradients/bert/encoder/layer_10/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_10/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_10/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/self/add_grad/Sum" + input: 
"gradients/bert/encoder/layer_10/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_10/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_9/output/layer_normalization_20/add" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_10/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_10/attention/self/MatMul" + input: "gradients/bert/encoder/layer_10/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_10/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_10/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_10/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_10/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: 
"gradients/bert/encoder/layer_10/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_10/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_10/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_10/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_10/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_10/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_10/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Reshape_1_grad/Shape" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_10/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_10/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_9/output/layer_normalization_20/add" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: 
"gradients/bert/encoder/layer_10/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_10/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_10/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_9/output/layer_normalization_20/add" + input: "gradients/bert/encoder/layer_10/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_5" + op: "AddN" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_10/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_10/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_10/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_5" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/add" + } + } + } + attr { + key: "_output_shapes" + value { + 
list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/add_grad/Sum" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_5" + input: "bert/encoder/layer_9/output/layer_normalization_20/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_9/output/layer_normalization_20/Reshape_1" + input: "gradients/AddN_5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + 
type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_20" + op: "ZerosLike" + input: "bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_21" + op: "ZerosLike" + input: "bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_22" + op: "ZerosLike" + input: "bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_23" + op: "ZerosLike" + input: 
"bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_24" + op: "ZerosLike" + input: "bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_9/output/layer_normalization_20/Reshape" + input: "bert/encoder/layer_9/output/layer_normalization_20/ones" + input: "bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3:3" + input: "bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3:4" + input: "bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_9/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Sum" + input: 
"gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_9/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Reshape" + 
input: "bert/encoder/layer_9/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_9/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_9/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_9/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_9/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_9/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_9/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_9/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_9/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_9/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_9/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_9/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_9/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_9/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_9/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_9/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { 
+ key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_9/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_9/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_9/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Mul_1" + input: 
"bert/encoder/layer_9/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_9/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_9/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_9/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_9/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { 
+ tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_9/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } 
+ attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_6" + op: "AddN" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_9/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_6" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_6" + input: "bert/encoder/layer_9/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/add" + input: "gradients/AddN_6" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_7" + op: "AddN" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_9/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_7" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/add_grad/Sum" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_7" + 
input: "bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_1" + input: "gradients/AddN_7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_25" + op: "ZerosLike" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_26" + op: "ZerosLike" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_27" + op: "ZerosLike" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_28" + op: "ZerosLike" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_29" + op: "ZerosLike" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3" + } + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/ones" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3:3" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3:4" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_9/attention/output/dropout/GreaterEqual" + input: 
"gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_9/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_9/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value 
{ + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_9/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } 
+ } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_9/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_9/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_9/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_9/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_9/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_9/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_9/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_9/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_9/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_9/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_9/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_9/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_9/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_9/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_9/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_9/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_9/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_9/attention/self/Softmax" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_9/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + 
value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/sub" + op: "Sub" + input: "gradients/bert/encoder/layer_9/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_9/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_9/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_9/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/add_grad/Sum" + input: "gradients/bert/encoder/layer_9/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_9/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_8/output/layer_normalization_18/add" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_9/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_9/attention/self/MatMul" + input: "gradients/bert/encoder/layer_9/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + 
size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_9/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_9/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_9/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + 
b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_9/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_9/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_9/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_9/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_9/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_9/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Reshape_1_grad/Shape" + op: "Const" + 
attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_9/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_8/output/layer_normalization_18/add" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: 
"gradients/bert/encoder/layer_9/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_9/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_9/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_8/output/layer_normalization_18/add" + input: "gradients/bert/encoder/layer_9/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_8" + op: "AddN" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_9/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_9/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_9/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_8" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/add_grad/Sum" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_8" + input: "bert/encoder/layer_8/output/layer_normalization_18/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_8/output/layer_normalization_18/Reshape_1" + input: "gradients/AddN_8" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_30" + op: "ZerosLike" + input: "bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_31" + op: "ZerosLike" + input: "bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_32" + op: "ZerosLike" + input: "bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_33" + op: "ZerosLike" + input: 
"bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_34" + op: "ZerosLike" + input: "bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_8/output/layer_normalization_18/Reshape" + input: "bert/encoder/layer_8/output/layer_normalization_18/ones" + input: "bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3:3" + input: "bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3:4" + input: "bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_8/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Sum" + input: 
"gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_8/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Reshape" + 
input: "bert/encoder/layer_8/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_8/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_8/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_8/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_8/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_8/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_8/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_8/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_8/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_8/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_8/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_8/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_8/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_8/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_8/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_8/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { 
+ key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_8/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_8/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_8/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Mul_1" + input: 
"bert/encoder/layer_8/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_8/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_8/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_8/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_8/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { 
+ tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_8/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } 
+ attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_9" + op: "AddN" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_8/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_9" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_9" + input: "bert/encoder/layer_8/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/add" + input: "gradients/AddN_9" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_10" + op: "AddN" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_8/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_10" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/add_grad/Sum" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Mul" + op: "Mul" + input: 
"gradients/AddN_10" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_1" + input: "gradients/AddN_10" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_35" + op: "ZerosLike" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_36" + op: "ZerosLike" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_37" + op: "ZerosLike" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_38" + op: "ZerosLike" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_39" + op: "ZerosLike" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3" + } + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/ones" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3:3" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3:4" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_8/attention/output/dropout/GreaterEqual" + input: 
"gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_8/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_8/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value 
{ + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_8/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } 
+ } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_8/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_8/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_8/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_8/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_8/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_8/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_8/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_8/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_8/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_8/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_8/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_8/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_8/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_8/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_8/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_8/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_8/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_8/attention/self/Softmax" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_8/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + 
value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/sub" + op: "Sub" + input: "gradients/bert/encoder/layer_8/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_8/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_8/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_8/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/add_grad/Sum" + input: "gradients/bert/encoder/layer_8/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_8/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_7/output/layer_normalization_16/add" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_8/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_8/attention/self/MatMul" + input: "gradients/bert/encoder/layer_8/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + 
size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_8/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_8/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_8/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + 
b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_8/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_8/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_8/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_8/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_8/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_8/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Reshape_1_grad/Shape" + op: "Const" + 
attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_8/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_7/output/layer_normalization_16/add" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: 
"gradients/bert/encoder/layer_8/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_8/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_8/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_7/output/layer_normalization_16/add" + input: "gradients/bert/encoder/layer_8/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_11" + op: "AddN" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_8/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_8/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_8/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_11" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/add_grad/Sum" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_11" + input: "bert/encoder/layer_7/output/layer_normalization_16/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_7/output/layer_normalization_16/Reshape_1" + input: "gradients/AddN_11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_40" + op: "ZerosLike" + input: "bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_41" + op: "ZerosLike" + input: "bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_42" + op: "ZerosLike" + input: "bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_43" + op: "ZerosLike" + input: 
"bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_44" + op: "ZerosLike" + input: "bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_7/output/layer_normalization_16/Reshape" + input: "bert/encoder/layer_7/output/layer_normalization_16/ones" + input: "bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3:3" + input: "bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3:4" + input: "bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_7/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Sum" + input: 
"gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_7/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Reshape" + 
input: "bert/encoder/layer_7/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_7/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_7/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_7/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_7/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_7/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_7/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_7/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_7/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_7/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_7/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_7/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_7/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_7/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_7/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_7/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { 
+ key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_7/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_7/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_7/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Mul_1" + input: 
"bert/encoder/layer_7/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_7/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_7/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_7/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_7/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { 
+ tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_7/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } 
+ attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_12" + op: "AddN" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_7/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_12" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_12" + input: "bert/encoder/layer_7/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/add" + input: "gradients/AddN_12" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_13" + op: "AddN" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_7/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_13" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/add_grad/Sum" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Mul" + op: "Mul" + input: 
"gradients/AddN_13" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_1" + input: "gradients/AddN_13" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_45" + op: "ZerosLike" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_46" + op: "ZerosLike" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_47" + op: "ZerosLike" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_48" + op: "ZerosLike" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_49" + op: "ZerosLike" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3" + } + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/ones" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3:3" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3:4" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_7/attention/output/dropout/GreaterEqual" + input: 
"gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_7/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_7/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value 
{ + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_7/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } 
+ } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_7/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_7/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_7/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_7/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_7/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_7/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_7/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_7/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_7/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_7/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_7/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_7/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_7/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_7/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_7/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_7/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_7/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_7/attention/self/Softmax" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_7/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + 
value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/sub" + op: "Sub" + input: "gradients/bert/encoder/layer_7/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_7/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_7/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_7/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/add_grad/Sum" + input: "gradients/bert/encoder/layer_7/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_7/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_6/output/layer_normalization_14/add" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_7/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_7/attention/self/MatMul" + input: "gradients/bert/encoder/layer_7/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + 
size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_7/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_7/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_7/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + 
b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_7/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_7/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_7/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_7/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_7/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_7/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Reshape_1_grad/Shape" + op: "Const" + 
attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_7/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_6/output/layer_normalization_14/add" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: 
"gradients/bert/encoder/layer_7/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_7/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_7/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_6/output/layer_normalization_14/add" + input: "gradients/bert/encoder/layer_7/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_14" + op: "AddN" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_7/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_7/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_7/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_14" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/add_grad/Sum" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_14" + input: "bert/encoder/layer_6/output/layer_normalization_14/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_6/output/layer_normalization_14/Reshape_1" + input: "gradients/AddN_14" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_50" + op: "ZerosLike" + input: "bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_51" + op: "ZerosLike" + input: "bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_52" + op: "ZerosLike" + input: "bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_53" + op: "ZerosLike" + input: 
"bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_54" + op: "ZerosLike" + input: "bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_6/output/layer_normalization_14/Reshape" + input: "bert/encoder/layer_6/output/layer_normalization_14/ones" + input: "bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3:3" + input: "bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3:4" + input: "bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_6/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Sum" + input: 
"gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_6/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Reshape" + 
input: "bert/encoder/layer_6/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_6/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_6/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_6/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_6/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_6/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_6/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_6/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_6/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_6/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_6/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_6/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_6/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_6/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_6/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_6/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { 
+ key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_6/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_6/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_6/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Mul_1" + input: 
"bert/encoder/layer_6/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_6/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_6/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_6/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_6/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { 
+ tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_6/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } 
+ attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_15" + op: "AddN" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_6/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_15" + input: "bert/encoder/layer_6/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/add" + input: "gradients/AddN_15" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_16" + op: "AddN" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_6/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_16" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/add_grad/Sum" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Mul" + op: "Mul" + input: 
"gradients/AddN_16" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_1" + input: "gradients/AddN_16" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_55" + op: "ZerosLike" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_56" + op: "ZerosLike" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_57" + op: "ZerosLike" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_58" + op: "ZerosLike" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_59" + op: "ZerosLike" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3" + } + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/ones" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3:3" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3:4" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_6/attention/output/dropout/GreaterEqual" + input: 
"gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_6/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_6/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value 
{ + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_6/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } 
+ } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_6/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_6/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_6/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_6/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_6/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_6/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_6/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_6/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_6/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_6/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_6/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_6/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_6/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_6/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_6/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_6/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_6/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_6/attention/self/Softmax" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_6/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + 
value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/sub" + op: "Sub" + input: "gradients/bert/encoder/layer_6/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_6/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_6/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_6/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/add_grad/Sum" + input: "gradients/bert/encoder/layer_6/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_6/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_5/output/layer_normalization_12/add" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_6/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_6/attention/self/MatMul" + input: "gradients/bert/encoder/layer_6/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + 
size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_6/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_6/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_6/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + 
b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_6/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_6/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_6/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_6/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_6/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_6/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Reshape_1_grad/Shape" + op: "Const" + 
attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_6/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_5/output/layer_normalization_12/add" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: 
"gradients/bert/encoder/layer_6/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_6/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_6/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_5/output/layer_normalization_12/add" + input: "gradients/bert/encoder/layer_6/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_17" + op: "AddN" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_6/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_6/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_6/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_17" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/add_grad/Sum" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_17" + input: "bert/encoder/layer_5/output/layer_normalization_12/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_5/output/layer_normalization_12/Reshape_1" + input: "gradients/AddN_17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_60" + op: "ZerosLike" + input: "bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_61" + op: "ZerosLike" + input: "bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_62" + op: "ZerosLike" + input: "bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_63" + op: "ZerosLike" + input: 
"bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_64" + op: "ZerosLike" + input: "bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_5/output/layer_normalization_12/Reshape" + input: "bert/encoder/layer_5/output/layer_normalization_12/ones" + input: "bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3:3" + input: "bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3:4" + input: "bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_5/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Sum" + input: 
"gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_5/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Reshape" + 
input: "bert/encoder/layer_5/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_5/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_5/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_5/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_5/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_5/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_5/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_5/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_5/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_5/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_5/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_5/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_5/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_5/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_5/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_5/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { 
+ key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_5/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_5/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_5/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Mul_1" + input: 
"bert/encoder/layer_5/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_5/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_5/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_5/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_5/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { 
+ tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_5/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } 
+ attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_18" + op: "AddN" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_5/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_18" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_18" + input: "bert/encoder/layer_5/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/add" + input: "gradients/AddN_18" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_19" + op: "AddN" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_5/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_19" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/add_grad/Sum" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Mul" + op: "Mul" + input: 
"gradients/AddN_19" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_1" + input: "gradients/AddN_19" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_65" + op: "ZerosLike" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_66" + op: "ZerosLike" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_67" + op: "ZerosLike" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_68" + op: "ZerosLike" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_69" + op: "ZerosLike" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3" + } + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/ones" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3:3" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3:4" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_5/attention/output/dropout/GreaterEqual" + input: 
"gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_5/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_5/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value 
{ + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_5/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } 
+ } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_5/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_5/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_5/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_5/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_5/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_5/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_5/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_5/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_5/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_5/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_5/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_5/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_5/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_5/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_5/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_5/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_5/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_5/attention/self/Softmax" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_5/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + 
value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/sub" + op: "Sub" + input: "gradients/bert/encoder/layer_5/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_5/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_5/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_5/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/add_grad/Sum" + input: "gradients/bert/encoder/layer_5/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_5/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_4/output/layer_normalization_10/add" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_5/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_5/attention/self/MatMul" + input: "gradients/bert/encoder/layer_5/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + 
size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_5/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_5/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_5/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + 
b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_5/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_5/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_5/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_5/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_5/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_5/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Reshape_1_grad/Shape" + op: "Const" + 
attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_5/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_4/output/layer_normalization_10/add" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: 
"gradients/bert/encoder/layer_5/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_5/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_5/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_4/output/layer_normalization_10/add" + input: "gradients/bert/encoder/layer_5/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_20" + op: "AddN" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_5/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_5/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_5/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_20" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/add_grad/Sum" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_20" + input: "bert/encoder/layer_4/output/layer_normalization_10/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_4/output/layer_normalization_10/Reshape_1" + input: "gradients/AddN_20" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_70" + op: "ZerosLike" + input: "bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_71" + op: "ZerosLike" + input: "bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_72" + op: "ZerosLike" + input: "bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_73" + op: "ZerosLike" + input: 
"bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_74" + op: "ZerosLike" + input: "bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_4/output/layer_normalization_10/Reshape" + input: "bert/encoder/layer_4/output/layer_normalization_10/ones" + input: "bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3:3" + input: "bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3:4" + input: "bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_4/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Sum" + input: 
"gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_4/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Reshape" + 
input: "bert/encoder/layer_4/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_4/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_4/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_4/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_4/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_4/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_4/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_4/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_4/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_4/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_4/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_4/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_4/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_4/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_4/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_4/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { 
+ key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_4/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_4/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_4/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Mul_1" + input: 
"bert/encoder/layer_4/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_4/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_4/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_4/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_4/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { 
+ tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_4/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } 
+ attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_21" + op: "AddN" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_4/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_21" + input: "bert/encoder/layer_4/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/add" + input: "gradients/AddN_21" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_22" + op: "AddN" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_4/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_22" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/add_grad/Sum" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_22" + input: 
"bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_1" + input: "gradients/AddN_22" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_75" + op: "ZerosLike" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_76" + op: "ZerosLike" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_77" + op: "ZerosLike" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_78" + op: "ZerosLike" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_79" + op: "ZerosLike" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/ones" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3:3" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3:4" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_4/attention/output/dropout/GreaterEqual" + input: 
"gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_4/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_4/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { 
+ list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_4/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + 
} +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_4/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_4/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_4/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_4/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { 
+ dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_4/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_4/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_4/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_4/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_4/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_4/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_4/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_4/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_4/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_4/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_4/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_4/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_4/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_4/attention/self/Softmax" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_4/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + 
value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/sub" + op: "Sub" + input: "gradients/bert/encoder/layer_4/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_4/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_4/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_4/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/add_grad/Sum" + input: "gradients/bert/encoder/layer_4/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_4/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_3/output/layer_normalization_8/add" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_4/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_4/attention/self/MatMul" + input: "gradients/bert/encoder/layer_4/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + 
size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_4/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_4/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_4/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + 
b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_4/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_4/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_4/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_4/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_4/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_4/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Reshape_1_grad/Shape" + op: "Const" + 
attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_4/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_3/output/layer_normalization_8/add" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: 
"gradients/bert/encoder/layer_4/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_4/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_4/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_3/output/layer_normalization_8/add" + input: "gradients/bert/encoder/layer_4/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_23" + op: "AddN" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_4/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_4/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_4/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_23" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/add_grad/Sum" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_23" + input: "bert/encoder/layer_3/output/layer_normalization_8/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_3/output/layer_normalization_8/Reshape_1" + input: "gradients/AddN_23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + 
key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_80" + op: "ZerosLike" + input: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_81" + op: "ZerosLike" + input: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_82" + op: "ZerosLike" + input: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_83" + op: "ZerosLike" + input: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3:4" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_84" + op: "ZerosLike" + input: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_3/output/layer_normalization_8/Reshape" + input: "bert/encoder/layer_3/output/layer_normalization_8/ones" + input: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3:3" + input: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3:4" + input: "bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_3/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + 
key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_3/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_3/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value 
{ + list { + s: "loc:@bert/encoder/layer_3/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_3/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_3/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Mul" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_3/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_3/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_3/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_3/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_3/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_3/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_3/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_3/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} 
+node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_3/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_3/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_3/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + 
} + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_3/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_3/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_3/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_3/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_3/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_3/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_3/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_3/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/zeros_like" + op: 
"Fill" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_3/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + 
key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_24" + op: "AddN" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_3/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_24" + input: "bert/encoder/layer_3/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/add" + input: "gradients/AddN_24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { 
+ list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_25" + op: "AddN" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_3/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_25" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/add_grad/Sum" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_25" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + 
list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_1" + input: "gradients/AddN_25" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_1" 
+ } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_85" + op: "ZerosLike" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_86" + op: "ZerosLike" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_87" + op: "ZerosLike" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_88" + op: "ZerosLike" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_89" + op: "ZerosLike" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: 
"FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/ones" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3:3" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3:4" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_3/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: 
"bert/encoder/layer_3/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_3/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } 
+ } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_3/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: 
"gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_3/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_3/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_3/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_3/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_3/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_3/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_3/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_3/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_3/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_3/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_3/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_3/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + 
} + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_3/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_3/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim 
{ + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_3/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_3/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_3/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/transpose_2" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_3/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_3/attention/self/Softmax" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + 
name: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_3/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: 
"gradients/bert/encoder/layer_3/attention/self/Softmax_grad/sub" + op: "Sub" + input: "gradients/bert/encoder/layer_3/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_3/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_3/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/add_grad/Reshape" + op: "Reshape" + input: 
"gradients/bert/encoder/layer_3/attention/self/add_grad/Sum" + input: "gradients/bert/encoder/layer_3/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_3/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_2/output/layer_normalization_6/add" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_3/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_3/attention/self/MatMul" + input: "gradients/bert/encoder/layer_3/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: 
"\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_3/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_3/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_3/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + 
name: "gradients/bert/encoder/layer_3/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_3/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_3/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_3/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_3/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_3/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_3/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + 
value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_3/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_3/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_2/output/layer_normalization_6/add" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + 
input: "gradients/bert/encoder/layer_3/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_3/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_3/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_2/output/layer_normalization_6/add" + input: "gradients/bert/encoder/layer_3/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_26" + op: "AddN" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_3/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_3/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_3/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_26" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } 
+ } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/add_grad/Sum" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_26" + input: "bert/encoder/layer_2/output/layer_normalization_6/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_2/output/layer_normalization_6/Reshape_1" + input: "gradients/AddN_26" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + 
} + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_90" + op: "ZerosLike" + input: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_91" + op: "ZerosLike" + input: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_92" + op: "ZerosLike" + input: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_93" + op: "ZerosLike" + input: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_94" + op: "ZerosLike" + input: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_2/output/layer_normalization_6/Reshape" + input: "bert/encoder/layer_2/output/layer_normalization_6/ones" + input: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3:3" + input: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3:4" + input: "bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" 
+ value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_2/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + 
list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_2/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_2/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/Mul" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_2/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_2/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + 
list { + s: "loc:@bert/encoder/layer_2/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_2/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_2/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_2/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_2/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_2/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_2/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_2/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: 
DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_2/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/add_1_grad/Sum" + op: 
"Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_2/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_2/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_2/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: 
"value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_2/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_2/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_2/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_2/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_2/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + 
dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_2/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_2/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_2/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + 
tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_2/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: 
"gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_2/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_27" + op: "AddN" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_2/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_27" + input: "bert/encoder/layer_2/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/add" + input: "gradients/AddN_27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: 
"transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_28" + op: "AddN" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_2/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_28" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/add_grad/Sum" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_28" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + } + } + } + attr 
{ + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_1" + input: "gradients/AddN_28" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } 
+ } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_95" + op: "ZerosLike" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_96" + op: "ZerosLike" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_97" + op: "ZerosLike" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_98" + op: "ZerosLike" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_99" + op: "ZerosLike" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: 
"gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/ones" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3:3" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3:4" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_2/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_2/attention/output/dropout/GreaterEqual" + input: 
"gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_2/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_2/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_2/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_2/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_2/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_2/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: 
"gradients/bert/encoder/layer_2/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_2/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_2/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_2/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_2/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_2/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_2/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_2/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_2/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_2/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_2/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_2/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_2/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + 
size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_2/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_2/attention/self/Softmax" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: 
"gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_2/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/sub" + op: "Sub" + input: 
"gradients/bert/encoder/layer_2/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_2/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_2/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/self/add_grad/Sum" + input: 
"gradients/bert/encoder/layer_2/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_2/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_1/output/layer_normalization_4/add" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_2/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_2/attention/self/MatMul" + input: "gradients/bert/encoder/layer_2/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_2/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_2/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_2/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_2/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/transpose_grad/InvertPermutation" + op: 
"InvertPermutation" + input: "bert/encoder/layer_2/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_2/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_2/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_2/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_2/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_2/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Reshape_1" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_2/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_2/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_1/output/layer_normalization_4/add" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_1_grad/Reshape" + input: 
"bert/encoder/layer_2/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_2/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_1/output/layer_normalization_4/add" + input: "gradients/bert/encoder/layer_2/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_29" + op: "AddN" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_2/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_2/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_2/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_29" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + 
dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/add_grad/Sum" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_29" + input: "bert/encoder/layer_1/output/layer_normalization_4/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_1/output/layer_normalization_4/Reshape_1" + input: "gradients/AddN_29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_100" + op: "ZerosLike" + input: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_101" + op: "ZerosLike" + input: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_102" + op: "ZerosLike" + input: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_103" + op: "ZerosLike" + input: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_104" + op: "ZerosLike" + input: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_1/output/layer_normalization_4/Reshape" + input: "bert/encoder/layer_1/output/layer_normalization_4/ones" + input: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3:3" + input: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3:4" + input: "bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" 
+ value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_1/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + 
list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_1/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_1/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/Mul" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_1/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_1/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + 
list { + s: "loc:@bert/encoder/layer_1/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_1/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_1/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_1/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_1/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_1/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_1/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_1/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: 
DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_1/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/add_1_grad/Sum" + op: 
"Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_1/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_1/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_1/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: 
"value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_1/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_1/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_1/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_1/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_1/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + 
dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_1/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_1/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_1/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + 
tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_1/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: 
"gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_1/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_30" + op: "AddN" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_1/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_30" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_30" + input: "bert/encoder/layer_1/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/add" + input: "gradients/AddN_30" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: 
"transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_31" + op: "AddN" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_1/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_31" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/add_grad/Sum" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_31" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + } + } + } + attr 
{ + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_1" + input: "gradients/AddN_31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } 
+ } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_105" + op: "ZerosLike" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_106" + op: "ZerosLike" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_107" + op: "ZerosLike" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_108" + op: "ZerosLike" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_109" + op: "ZerosLike" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: 
"gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/ones" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3:3" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3:4" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_1/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_1/attention/output/dropout/GreaterEqual" + input: 
"gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_1/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_1/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_1/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_1/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_1/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_1/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: 
"gradients/bert/encoder/layer_1/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_1/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_1/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_1/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_1/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_1/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_1/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_1/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_1/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_1/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_1/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_1/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_1/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + 
size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_1/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_1/attention/self/Softmax" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: 
"gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_1/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/sub" + op: "Sub" + input: 
"gradients/bert/encoder/layer_1/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_1/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_1/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/self/add_grad/Sum" + input: 
"gradients/bert/encoder/layer_1/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_1/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_0/output/layer_normalization_2/add" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_1/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_1/attention/self/MatMul" + input: "gradients/bert/encoder/layer_1/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_1/attention/self/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_1/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_1/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_1/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/transpose_grad/InvertPermutation" + op: 
"InvertPermutation" + input: "bert/encoder/layer_1/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_1/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_1/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_1/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_1/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_1/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Reshape_1" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_1/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_1/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_0/output/layer_normalization_2/add" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_1_grad/Reshape" + input: 
"bert/encoder/layer_1/attention/self/key/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_1/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_0/output/layer_normalization_2/add" + input: "gradients/bert/encoder/layer_1/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_32" + op: "AddN" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_1/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_1/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_1/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_32" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + 
dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/add_grad/Sum" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_32" + input: "bert/encoder/layer_0/output/layer_normalization_2/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_0/output/layer_normalization_2/Reshape_1" + input: "gradients/AddN_32" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_110" + op: "ZerosLike" + input: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_111" + op: "ZerosLike" + input: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_112" + op: "ZerosLike" + input: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_113" + op: "ZerosLike" + input: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_114" + op: "ZerosLike" + input: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_0/output/layer_normalization_2/Reshape" + input: "bert/encoder/layer_0/output/layer_normalization_2/ones" + input: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3:3" + input: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3:4" + input: "bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" 
+ value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_0/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + 
list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_0/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_0/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/Mul" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_0/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_0/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + 
list { + s: "loc:@bert/encoder/layer_0/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_0/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_0/intermediate/dense/mul_3" + input: "gradients/bert/encoder/layer_0/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_0/intermediate/dense/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_3_grad/Mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_0/output/dense/MatMul_grad/MatMul" + input: "bert/encoder/layer_0/intermediate/dense/BiasAdd" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_3_grad/Mul_1" + input: "bert/encoder/layer_0/intermediate/dense/add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: 
DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_0/intermediate/dense/mul_2/x" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_3_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_2" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/add_1_grad/Sum" + op: 
"Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_0/intermediate/dense/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/add_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/intermediate/dense/add_1_grad/Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/add_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Tanh_grad/TanhGrad" + op: "TanhGrad" + input: "bert/encoder/layer_0/intermediate/dense/Tanh" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_2_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Tanh" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Tanh_grad/TanhGrad" + input: "bert/encoder/layer_0/intermediate/dense/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: 
"value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_0/intermediate/dense/mul_1/x" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Tanh_grad/TanhGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Mul_1" + input: "bert/encoder/layer_0/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_0/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_0/intermediate/dense/mul/x" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_0/intermediate/dense/Pow/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + 
dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/sub/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/sub" + op: "Sub" + input: "bert/encoder/layer_0/intermediate/dense/Pow/y" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/sub/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Pow" + op: "Pow" + input: "bert/encoder/layer_0/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Greater/y" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Greater" + op: "Greater" + input: "bert/encoder/layer_0/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Greater/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + 
tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/ones_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/ones_like" + op: "Fill" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/ones_like/Shape/shape_as_tensor" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/ones_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Select" + op: "Select" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Greater" + input: "bert/encoder/layer_0/intermediate/dense/BiasAdd" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/ones_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Log" + op: "Log" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Select" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\014\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/zeros_like/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/zeros_like" + op: "Fill" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/zeros_like/shape_as_tensor" + input: 
"gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/zeros_like/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Select_1" + op: "Select" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Greater" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Log" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/zeros_like" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/mul_2" + op: "Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_grad/Mul_1" + input: "bert/encoder/layer_0/intermediate/dense/Pow" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/mul_3" + op: "Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/mul_2" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Select_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/mul_3" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Sum" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/Pow" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/AddN_33" + op: "AddN" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_3_grad/Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/mul_1_grad/Mul_1" + input: "gradients/bert/encoder/layer_0/intermediate/dense/Pow_grad/mul_1" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/AddN_33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/AddN_33" + input: "bert/encoder/layer_0/intermediate/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/intermediate/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/add" + input: "gradients/AddN_33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: 
"transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_34" + op: "AddN" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_0/intermediate/dense/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/add_grad/Sum" + op: "Sum" + input: "gradients/AddN_34" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/add_grad/Sum" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Mul" + op: "Mul" + input: "gradients/AddN_34" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + } + } + } + attr 
{ + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_1" + input: "gradients/AddN_34" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Mul_1" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Sum" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } 
+ } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Mul" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_115" + op: "ZerosLike" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_116" + op: "ZerosLike" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_117" + op: "ZerosLike" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_118" + op: "ZerosLike" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_119" + op: "ZerosLike" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: 
"gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/ones" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3:3" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3:4" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_0/attention/output/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_0/attention/output/dropout/GreaterEqual" + input: 
"gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_0/attention/output/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_0/attention/output/dense/BiasAdd" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dense/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dense/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_0/attention/output/dense/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/output/dense/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/layer_0/attention/self/Reshape_3" + input: "gradients/bert/encoder/layer_0/attention/output/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\014\000\000\000@\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/output/dense/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/transpose_3_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_0/attention/self/transpose_3/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/transpose_3_grad/transpose" + op: "Transpose" + input: 
"gradients/bert/encoder/layer_0/attention/self/Reshape_3_grad/Reshape" + input: "gradients/bert/encoder/layer_0/attention/self/transpose_3_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/transpose_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/MatMul_1_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_0/attention/self/transpose_3_grad/transpose" + input: "bert/encoder/layer_0/attention/self/transpose_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/MatMul_1_grad/MatMul_1" + op: "BatchMatMulV2" + input: "bert/encoder/layer_0/attention/self/dropout/SelectV2" + input: "gradients/bert/encoder/layer_0/attention/self/transpose_3_grad/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/MatMul_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/encoder/layer_0/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_0/attention/self/MatMul_1_grad/MatMul" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/zeros" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\014\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Shape" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Sum" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/encoder/layer_0/attention/self/dropout/GreaterEqual" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/layer_0/attention/self/MatMul_1_grad/MatMul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: 
"gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/transpose_2_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_0/attention/self/transpose_2/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/transpose_2_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_0/attention/self/MatMul_1_grad/MatMul_1" + input: "gradients/bert/encoder/layer_0/attention/self/transpose_2_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/transpose_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + 
size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Reshape" + input: "bert/encoder/layer_0/attention/self/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_0/attention/self/Softmax" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Reshape" + op: "Reshape" + input: 
"gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Reshape_2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Reshape_2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/self/transpose_2_grad/transpose" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Reshape_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Mul" + input: "bert/encoder/layer_0/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: -1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/mul" + input: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/sub" + op: "Sub" + input: 
"gradients/bert/encoder/layer_0/attention/self/dropout/Mul_grad/Mul" + input: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/Sum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/mul_1" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/sub" + input: "bert/encoder/layer_0/attention/self/Softmax" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Softmax" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/value/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/add_grad/Sum" + op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/mul_1" + input: "gradients/bert/encoder/layer_0/attention/self/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\003\000\000\000\001\000\000\000\200\001\000\000\200\001\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/self/add_grad/Sum" + input: 
"gradients/bert/encoder/layer_0/attention/self/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/value/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_2_grad/Reshape" + input: "bert/encoder/layer_0/attention/self/value/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/value/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/Reshape_1" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/mul_1" + input: "bert/encoder/layer_0/attention/self/Mul/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Mul_1" + op: "Mul" + input: "bert/encoder/layer_0/attention/self/MatMul" + input: "gradients/bert/encoder/layer_0/attention/self/Softmax_grad/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 384 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000\003\000\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Sum" 
+ op: "Sum" + input: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Mul_1" + input: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Sum" + input: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/MatMul_grad/MatMul" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_0/attention/self/transpose_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: false + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/MatMul_grad/MatMul_1" + op: "BatchMatMulV2" + input: "gradients/bert/encoder/layer_0/attention/self/Mul_grad/Mul" + input: "bert/encoder/layer_0/attention/self/transpose" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 12 + } + dim { + size: 384 + } + dim { + size: 64 + } + } + } + } + } + attr { + key: "adj_x" + value { + b: true + } + } + attr { + key: "adj_y" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/transpose_grad/InvertPermutation" + op: "InvertPermutation" + input: 
"bert/encoder/layer_0/attention/self/transpose/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/transpose_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_0/attention/self/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_0/attention/self/transpose_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/transpose" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/transpose_1_grad/InvertPermutation" + op: "InvertPermutation" + input: "bert/encoder/layer_0/attention/self/transpose_1/perm" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/transpose_1_grad/transpose" + op: "Transpose" + input: "gradients/bert/encoder/layer_0/attention/self/MatMul_grad/MatMul_1" + input: "gradients/bert/encoder/layer_0/attention/self/transpose_1_grad/InvertPermutation" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tperm" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/transpose_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 12 + } + dim { + size: 64 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/self/transpose_grad/transpose" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/encoder/layer_0/attention/self/transpose_1_grad/transpose" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/query/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/key/BiasAdd_grad/BiasAddGrad" + op: "BiasAddGrad" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/query/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_grad/Reshape" + input: "bert/encoder/layer_0/attention/self/query/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/query/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/Reshape_1" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/key/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_1_grad/Reshape" + input: "bert/encoder/layer_0/attention/self/key/MatMul/ReadVariableOp" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/encoder/layer_0/attention/self/key/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/encoder/Reshape_1" + input: "gradients/bert/encoder/layer_0/attention/self/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/AddN_35" + op: "AddN" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape_grad/Reshape" + input: "gradients/bert/encoder/layer_0/attention/self/value/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_0/attention/self/query/MatMul_grad/MatMul" + input: "gradients/bert/encoder/layer_0/attention/self/key/MatMul_grad/MatMul" + attr { + key: "N" + value { + i: 4 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/encoder/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/encoder/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/AddN_35" + input: "gradients/bert/encoder/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/SelectV2" + op: "SelectV2" + input: "bert/embeddings/dropout/GreaterEqual" + input: "gradients/bert/encoder/Reshape_1_grad/Reshape" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/zeros" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Shape" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/Sum" + op: "Sum" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/SelectV2" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/BroadcastGradientArgs" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Sum" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/SelectV2_1" + op: "SelectV2" + input: "bert/embeddings/dropout/GreaterEqual" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/zeros" + input: "gradients/bert/encoder/Reshape_1_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/Shape_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/BroadcastGradientArgs_1" + op: "BroadcastGradientArgs" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Shape_2" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Shape_1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/Sum_1" + op: "Sum" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/SelectV2_1" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/BroadcastGradientArgs_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: true + } + } +} +node { + name: "gradients/bert/embeddings/dropout/SelectV2_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Sum_1" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Shape_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/SelectV2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/BroadcastGradientArgs/s0" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/BroadcastGradientArgs/s1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: 
"gradients/bert/embeddings/dropout/Mul_grad/BroadcastGradientArgs/s0" + input: "gradients/bert/embeddings/dropout/Mul_grad/BroadcastGradientArgs/s1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/Mul" + op: "Mul" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Reshape" + input: "bert/embeddings/dropout/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/Mul_1" + op: "Mul" + input: "bert/embeddings/layer_normalization/add" + input: "gradients/bert/embeddings/dropout/SelectV2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\000\000\000\000\001\000\000\000\002\000\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/Sum" + op: "Sum" + input: "gradients/bert/embeddings/dropout/Mul_grad/Mul_1" + input: "gradients/bert/embeddings/dropout/Mul_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/Reshape/shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/dropout/Mul_grad/Reshape" + op: "Reshape" + input: 
"gradients/bert/embeddings/dropout/Mul_grad/Sum" + input: "gradients/bert/embeddings/dropout/Mul_grad/Reshape/shape_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/dropout/Mul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/add_grad/BroadcastGradientArgs/s0" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/add_grad/BroadcastGradientArgs/s1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/add_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/embeddings/layer_normalization/add_grad/BroadcastGradientArgs/s0" + input: "gradients/bert/embeddings/layer_normalization/add_grad/BroadcastGradientArgs/s1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/add_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/add_grad/Sum" + op: "Sum" + input: "gradients/bert/embeddings/dropout/Mul_grad/Mul" + input: "gradients/bert/embeddings/layer_normalization/add_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/add_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/add_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/layer_normalization/add_grad/Sum" + input: "gradients/bert/embeddings/layer_normalization/add_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Mul" + op: "Mul" + input: "gradients/bert/embeddings/dropout/Mul_grad/Mul" + input: "bert/embeddings/layer_normalization/mul_3/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Mul_1" + op: "Mul" + input: "bert/embeddings/layer_normalization/Reshape_1" + input: "gradients/bert/embeddings/dropout/Mul_grad/Mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\000\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Sum" + op: "Sum" + input: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Mul_1" + input: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 768 + } + } + } 
+} +node { + name: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Sum" + input: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 4 + } + } + tensor_content: "\001\000\000\000\200\004\000\000\000\003\000\000\001\000\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Mul" + input: "gradients/bert/embeddings/layer_normalization/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_120" + op: "ZerosLike" + input: "bert/embeddings/layer_normalization/FusedBatchNormV3:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_121" + op: "ZerosLike" + input: "bert/embeddings/layer_normalization/FusedBatchNormV3:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_122" + op: "ZerosLike" + input: "bert/embeddings/layer_normalization/FusedBatchNormV3:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_123" + op: "ZerosLike" + input: "bert/embeddings/layer_normalization/FusedBatchNormV3:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "gradients/zeros_like_124" + op: "ZerosLike" + input: 
"bert/embeddings/layer_normalization/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/FusedBatchNormV3_grad/FusedBatchNormGradV3" + op: "FusedBatchNormGradV3" + input: "gradients/bert/embeddings/layer_normalization/Reshape_1_grad/Reshape" + input: "bert/embeddings/layer_normalization/Reshape" + input: "bert/embeddings/layer_normalization/ones" + input: "bert/embeddings/layer_normalization/FusedBatchNormV3:3" + input: "bert/embeddings/layer_normalization/FusedBatchNormV3:4" + input: "bert/embeddings/layer_normalization/FusedBatchNormV3:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "U" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/FusedBatchNormV3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 1152 + } + dim { + size: 768 + } + dim { + size: 1 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + size: 1152 + } + } + shape { + dim { + } + } + shape { + dim { + } + } + } + } + } + attr { + key: "data_format" + value { + s: "NCHW" + } + } + attr { + key: "epsilon" + value { + f: 0.0010000000474974513 + } + } + attr { + key: "is_training" + value { + b: true + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/Reshape_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/layer_normalization/Reshape_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/layer_normalization/FusedBatchNormV3_grad/FusedBatchNormGradV3" + input: "gradients/bert/embeddings/layer_normalization/Reshape_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/Reshape" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/add_1_grad/BroadcastGradientArgs/s0" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\003\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/add_1_grad/BroadcastGradientArgs/s1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + 
list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\001\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/add_1_grad/BroadcastGradientArgs" + op: "BroadcastGradientArgs" + input: "gradients/bert/embeddings/add_1_grad/BroadcastGradientArgs/s0" + input: "gradients/bert/embeddings/add_1_grad/BroadcastGradientArgs/s1" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/add_1_grad/Sum/reduction_indices" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/embeddings/add_1_grad/Sum" + op: "Sum" + input: "gradients/bert/embeddings/layer_normalization/Reshape_grad/Reshape" + input: "gradients/bert/embeddings/add_1_grad/Sum/reduction_indices" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "gradients/bert/embeddings/add_1_grad/Reshape/shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 3 + } + } + tensor_content: "\001\000\000\000\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/add_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/add_1_grad/Sum" + input: "gradients/bert/embeddings/add_1_grad/Reshape/shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/add_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Reshape_4_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Reshape_4" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\001\000\000\000\003\000\000" + } + } + } +} +node { + name: 
"gradients/bert/embeddings/Reshape_4_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/add_1_grad/Reshape" + input: "gradients/bert/embeddings/Reshape_4_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Reshape_4" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 384 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Reshape_1_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/Reshape_1_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/layer_normalization/Reshape_grad/Reshape" + input: "gradients/bert/embeddings/Reshape_1_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Reshape_1" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Reshape_3_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\004\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/Reshape_3_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/layer_normalization/Reshape_grad/Reshape" + input: "gradients/bert/embeddings/Reshape_3_grad/Shape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Reshape_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/Rank" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 2 + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/Shape" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\200\001\000\000\000\003\000\000" + } + } + } 
+} +node { + name: "gradients/bert/embeddings/Slice_grad/stack/1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/stack" + op: "Pack" + input: "gradients/bert/embeddings/Slice_grad/Rank" + input: "gradients/bert/embeddings/Slice_grad/stack/1" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/Reshape" + op: "Reshape" + input: "bert/embeddings/Slice/begin" + input: "gradients/bert/embeddings/Slice_grad/stack" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/Shape_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\002\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/sub" + op: "Sub" + input: "gradients/bert/embeddings/Slice_grad/Shape_1" + input: "gradients/bert/embeddings/Slice_grad/Shape" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/sub_1" + op: "Sub" + input: "gradients/bert/embeddings/Slice_grad/sub" + input: "bert/embeddings/Slice/begin" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/Reshape_1" + op: "Reshape" + input: "gradients/bert/embeddings/Slice_grad/sub_1" + input: "gradients/bert/embeddings/Slice_grad/stack" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/concat/axis" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } 
+ attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/concat" + op: "ConcatV2" + input: "gradients/bert/embeddings/Slice_grad/Reshape" + input: "gradients/bert/embeddings/Slice_grad/Reshape_1" + input: "gradients/bert/embeddings/Slice_grad/concat/axis" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 2 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Slice_grad/Pad" + op: "Pad" + input: "gradients/bert/embeddings/Reshape_4_grad/Reshape" + input: "gradients/bert/embeddings/Slice_grad/concat" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tpaddings" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/MatMul_grad/MatMul" + op: "MatMul" + input: "gradients/bert/embeddings/Reshape_3_grad/Reshape" + input: "bert/embeddings/MatMul/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 2 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: true + } + } +} +node { + name: "gradients/bert/embeddings/MatMul_grad/MatMul_1" + op: "MatMul" + input: "bert/embeddings/one_hot" + input: "gradients/bert/embeddings/Reshape_3_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "transpose_a" + value { + b: true + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: ":w\000\000\000\003\000\000" + } + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/Size" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1152 + } + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/ExpandDims/dim" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/ExpandDims" + op: "ExpandDims" + input: "gradients/bert/embeddings/Gather_grad/Size" + input: "gradients/bert/embeddings/Gather_grad/ExpandDims/dim" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tdim" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/strided_slice/stack" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/strided_slice/stack_1" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/strided_slice/stack_2" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/strided_slice" + op: "StridedSlice" + input: "gradients/bert/embeddings/Gather_grad/Const" + input: "gradients/bert/embeddings/Gather_grad/strided_slice/stack" + input: "gradients/bert/embeddings/Gather_grad/strided_slice/stack_1" + input: "gradients/bert/embeddings/Gather_grad/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 1 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 0 + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/concat/axis" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} 
+node { + name: "gradients/bert/embeddings/Gather_grad/concat" + op: "ConcatV2" + input: "gradients/bert/embeddings/Gather_grad/ExpandDims" + input: "gradients/bert/embeddings/Gather_grad/strided_slice" + input: "gradients/bert/embeddings/Gather_grad/concat/axis" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/Reshape" + op: "Reshape" + input: "gradients/bert/embeddings/Reshape_1_grad/Reshape" + input: "gradients/bert/embeddings/Gather_grad/concat" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "gradients/bert/embeddings/Gather_grad/Reshape_1" + op: "Reshape" + input: "bert/embeddings/Reshape" + input: "gradients/bert/embeddings/Gather_grad/ExpandDims" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + } + } + } + } +} +node { + name: "global_norm/L2Loss" + op: "L2Loss" + input: "gradients/bert/embeddings/Gather_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_1" + op: "L2Loss" + input: "gradients/bert/embeddings/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_2" + op: "L2Loss" + input: "gradients/bert/embeddings/Slice_grad/Pad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_3" + op: "L2Loss" + input: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_4" + op: "L2Loss" + input: "gradients/bert/embeddings/layer_normalization/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_5" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_6" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_7" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_8" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_9" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_10" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_11" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_12" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_13" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_14" + op: "L2Loss" + input: 
"gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_15" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_16" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_17" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_18" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_19" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_20" + op: "L2Loss" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_21" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_22" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_23" + op: "L2Loss" + input: 
"gradients/bert/encoder/layer_1/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_24" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_25" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_26" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_27" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_28" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_29" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_30" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_31" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"global_norm/L2Loss_32" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_33" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_34" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_35" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_36" + op: "L2Loss" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_37" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_38" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_39" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_40" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_41" + 
op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_42" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_43" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_44" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_45" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_46" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_47" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_48" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_49" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { 
+ name: "global_norm/L2Loss_50" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_51" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_52" + op: "L2Loss" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_53" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_54" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_55" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_56" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_57" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_58" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"global_norm/L2Loss_59" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_60" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_61" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_62" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_63" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_64" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_65" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_66" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_67" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } 
+ } + } + } +} +node { + name: "global_norm/L2Loss_68" + op: "L2Loss" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_69" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_70" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_71" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_72" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_73" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_74" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_75" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_76" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
} + } + } + } +} +node { + name: "global_norm/L2Loss_77" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_78" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_79" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_80" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_81" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_82" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_83" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_84" + op: "L2Loss" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_85" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" 
+ value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_86" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_87" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_88" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_89" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_90" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_91" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_92" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_93" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_94" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_95" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_96" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_97" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_98" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_99" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_100" + op: "L2Loss" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_101" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_102" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_103" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_6/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_104" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_105" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_106" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_107" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_108" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_109" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_110" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_111" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_112" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_113" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_114" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_115" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_116" + op: "L2Loss" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_117" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_118" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_119" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_120" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_121" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } 
+ attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_122" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_123" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_124" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_125" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_126" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_127" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_128" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_129" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_130" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + 
key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_131" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_132" + op: "L2Loss" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_133" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_134" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_135" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_136" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_137" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_138" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_139" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/output/dense/MatMul_grad/MatMul_1" 
+ attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_140" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_141" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_142" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_143" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_144" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_145" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_146" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_147" + op: "L2Loss" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_148" + op: "L2Loss" + input: 
"gradients/bert/encoder/layer_8/output/layer_normalization_18/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_149" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_150" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_151" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_152" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_153" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_154" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_155" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_156" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_157" + op: 
"L2Loss" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_158" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_159" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_160" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_161" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_162" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_163" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_164" + op: "L2Loss" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_165" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} 
+node { + name: "global_norm/L2Loss_166" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_167" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_168" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_169" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_170" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_171" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_172" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_173" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_174" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/add" + } + 
} + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_175" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_176" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_177" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_178" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_179" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_180" + op: "L2Loss" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_181" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/self/query/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_182" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/self/query/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_183" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/self/key/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/MatMul" + } + } 
+ } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_184" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/self/key/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_185" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/self/value/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_186" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/self/value/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_187" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_188" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_189" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_190" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_191" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/intermediate/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_192" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/intermediate/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + 
value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_193" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/output/dense/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_194" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/output/dense/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_195" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_196" + op: "L2Loss" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/Reshape" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_197" + op: "L2Loss" + input: "gradients/MatMul_grad/MatMul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/L2Loss_198" + op: "L2Loss" + input: "gradients/BiasAdd_grad/BiasAddGrad" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/stack" + op: "Pack" + input: "global_norm/L2Loss" + input: "global_norm/L2Loss_1" + input: "global_norm/L2Loss_2" + input: "global_norm/L2Loss_3" + input: "global_norm/L2Loss_4" + input: "global_norm/L2Loss_5" + input: "global_norm/L2Loss_6" + input: "global_norm/L2Loss_7" + input: "global_norm/L2Loss_8" + input: "global_norm/L2Loss_9" + input: "global_norm/L2Loss_10" + input: "global_norm/L2Loss_11" + input: "global_norm/L2Loss_12" + input: "global_norm/L2Loss_13" + input: "global_norm/L2Loss_14" + input: "global_norm/L2Loss_15" + input: "global_norm/L2Loss_16" + input: "global_norm/L2Loss_17" + input: "global_norm/L2Loss_18" + input: "global_norm/L2Loss_19" + input: "global_norm/L2Loss_20" + input: "global_norm/L2Loss_21" + input: "global_norm/L2Loss_22" + input: "global_norm/L2Loss_23" + input: "global_norm/L2Loss_24" + input: "global_norm/L2Loss_25" + input: "global_norm/L2Loss_26" + input: "global_norm/L2Loss_27" + input: "global_norm/L2Loss_28" + input: "global_norm/L2Loss_29" + input: "global_norm/L2Loss_30" + input: "global_norm/L2Loss_31" + input: "global_norm/L2Loss_32" + input: "global_norm/L2Loss_33" + input: 
"global_norm/L2Loss_34" + input: "global_norm/L2Loss_35" + input: "global_norm/L2Loss_36" + input: "global_norm/L2Loss_37" + input: "global_norm/L2Loss_38" + input: "global_norm/L2Loss_39" + input: "global_norm/L2Loss_40" + input: "global_norm/L2Loss_41" + input: "global_norm/L2Loss_42" + input: "global_norm/L2Loss_43" + input: "global_norm/L2Loss_44" + input: "global_norm/L2Loss_45" + input: "global_norm/L2Loss_46" + input: "global_norm/L2Loss_47" + input: "global_norm/L2Loss_48" + input: "global_norm/L2Loss_49" + input: "global_norm/L2Loss_50" + input: "global_norm/L2Loss_51" + input: "global_norm/L2Loss_52" + input: "global_norm/L2Loss_53" + input: "global_norm/L2Loss_54" + input: "global_norm/L2Loss_55" + input: "global_norm/L2Loss_56" + input: "global_norm/L2Loss_57" + input: "global_norm/L2Loss_58" + input: "global_norm/L2Loss_59" + input: "global_norm/L2Loss_60" + input: "global_norm/L2Loss_61" + input: "global_norm/L2Loss_62" + input: "global_norm/L2Loss_63" + input: "global_norm/L2Loss_64" + input: "global_norm/L2Loss_65" + input: "global_norm/L2Loss_66" + input: "global_norm/L2Loss_67" + input: "global_norm/L2Loss_68" + input: "global_norm/L2Loss_69" + input: "global_norm/L2Loss_70" + input: "global_norm/L2Loss_71" + input: "global_norm/L2Loss_72" + input: "global_norm/L2Loss_73" + input: "global_norm/L2Loss_74" + input: "global_norm/L2Loss_75" + input: "global_norm/L2Loss_76" + input: "global_norm/L2Loss_77" + input: "global_norm/L2Loss_78" + input: "global_norm/L2Loss_79" + input: "global_norm/L2Loss_80" + input: "global_norm/L2Loss_81" + input: "global_norm/L2Loss_82" + input: "global_norm/L2Loss_83" + input: "global_norm/L2Loss_84" + input: "global_norm/L2Loss_85" + input: "global_norm/L2Loss_86" + input: "global_norm/L2Loss_87" + input: "global_norm/L2Loss_88" + input: "global_norm/L2Loss_89" + input: "global_norm/L2Loss_90" + input: "global_norm/L2Loss_91" + input: "global_norm/L2Loss_92" + input: "global_norm/L2Loss_93" + input: "global_norm/L2Loss_94" + input: "global_norm/L2Loss_95" + input: "global_norm/L2Loss_96" + input: "global_norm/L2Loss_97" + input: "global_norm/L2Loss_98" + input: "global_norm/L2Loss_99" + input: "global_norm/L2Loss_100" + input: "global_norm/L2Loss_101" + input: "global_norm/L2Loss_102" + input: "global_norm/L2Loss_103" + input: "global_norm/L2Loss_104" + input: "global_norm/L2Loss_105" + input: "global_norm/L2Loss_106" + input: "global_norm/L2Loss_107" + input: "global_norm/L2Loss_108" + input: "global_norm/L2Loss_109" + input: "global_norm/L2Loss_110" + input: "global_norm/L2Loss_111" + input: "global_norm/L2Loss_112" + input: "global_norm/L2Loss_113" + input: "global_norm/L2Loss_114" + input: "global_norm/L2Loss_115" + input: "global_norm/L2Loss_116" + input: "global_norm/L2Loss_117" + input: "global_norm/L2Loss_118" + input: "global_norm/L2Loss_119" + input: "global_norm/L2Loss_120" + input: "global_norm/L2Loss_121" + input: "global_norm/L2Loss_122" + input: "global_norm/L2Loss_123" + input: "global_norm/L2Loss_124" + input: "global_norm/L2Loss_125" + input: "global_norm/L2Loss_126" + input: "global_norm/L2Loss_127" + input: "global_norm/L2Loss_128" + input: "global_norm/L2Loss_129" + input: "global_norm/L2Loss_130" + input: "global_norm/L2Loss_131" + input: "global_norm/L2Loss_132" + input: "global_norm/L2Loss_133" + input: "global_norm/L2Loss_134" + input: "global_norm/L2Loss_135" + input: "global_norm/L2Loss_136" + input: "global_norm/L2Loss_137" + input: "global_norm/L2Loss_138" + input: "global_norm/L2Loss_139" + input: 
"global_norm/L2Loss_140" + input: "global_norm/L2Loss_141" + input: "global_norm/L2Loss_142" + input: "global_norm/L2Loss_143" + input: "global_norm/L2Loss_144" + input: "global_norm/L2Loss_145" + input: "global_norm/L2Loss_146" + input: "global_norm/L2Loss_147" + input: "global_norm/L2Loss_148" + input: "global_norm/L2Loss_149" + input: "global_norm/L2Loss_150" + input: "global_norm/L2Loss_151" + input: "global_norm/L2Loss_152" + input: "global_norm/L2Loss_153" + input: "global_norm/L2Loss_154" + input: "global_norm/L2Loss_155" + input: "global_norm/L2Loss_156" + input: "global_norm/L2Loss_157" + input: "global_norm/L2Loss_158" + input: "global_norm/L2Loss_159" + input: "global_norm/L2Loss_160" + input: "global_norm/L2Loss_161" + input: "global_norm/L2Loss_162" + input: "global_norm/L2Loss_163" + input: "global_norm/L2Loss_164" + input: "global_norm/L2Loss_165" + input: "global_norm/L2Loss_166" + input: "global_norm/L2Loss_167" + input: "global_norm/L2Loss_168" + input: "global_norm/L2Loss_169" + input: "global_norm/L2Loss_170" + input: "global_norm/L2Loss_171" + input: "global_norm/L2Loss_172" + input: "global_norm/L2Loss_173" + input: "global_norm/L2Loss_174" + input: "global_norm/L2Loss_175" + input: "global_norm/L2Loss_176" + input: "global_norm/L2Loss_177" + input: "global_norm/L2Loss_178" + input: "global_norm/L2Loss_179" + input: "global_norm/L2Loss_180" + input: "global_norm/L2Loss_181" + input: "global_norm/L2Loss_182" + input: "global_norm/L2Loss_183" + input: "global_norm/L2Loss_184" + input: "global_norm/L2Loss_185" + input: "global_norm/L2Loss_186" + input: "global_norm/L2Loss_187" + input: "global_norm/L2Loss_188" + input: "global_norm/L2Loss_189" + input: "global_norm/L2Loss_190" + input: "global_norm/L2Loss_191" + input: "global_norm/L2Loss_192" + input: "global_norm/L2Loss_193" + input: "global_norm/L2Loss_194" + input: "global_norm/L2Loss_195" + input: "global_norm/L2Loss_196" + input: "global_norm/L2Loss_197" + input: "global_norm/L2Loss_198" + attr { + key: "N" + value { + i: 199 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 199 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "global_norm/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "global_norm/Sum" + op: "Sum" + input: "global_norm/stack" + input: "global_norm/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "global_norm/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 2.0 + } + } + } +} +node { + name: "global_norm/mul" + op: "Mul" + input: "global_norm/Sum" + input: "global_norm/Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "global_norm/global_norm" + op: "Sqrt" + 
input: "global_norm/mul" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "clip_by_global_norm/truediv/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "clip_by_global_norm/truediv" + op: "RealDiv" + input: "clip_by_global_norm/truediv/x" + input: "global_norm/global_norm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "clip_by_global_norm/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "clip_by_global_norm/truediv_1/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "clip_by_global_norm/truediv_1" + op: "RealDiv" + input: "clip_by_global_norm/Const" + input: "clip_by_global_norm/truediv_1/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "clip_by_global_norm/Minimum" + op: "Minimum" + input: "clip_by_global_norm/truediv" + input: "clip_by_global_norm/truediv_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "clip_by_global_norm/mul/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 1.0 + } + } + } +} +node { + name: "clip_by_global_norm/mul" + op: "Mul" + input: "clip_by_global_norm/mul/x" + input: "clip_by_global_norm/Minimum" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "clip_by_global_norm/sub" + op: "Sub" + input: "global_norm/global_norm" + input: "global_norm/global_norm" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "clip_by_global_norm/add" + op: "AddV2" + input: "clip_by_global_norm/mul" + input: "clip_by_global_norm/sub" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_1" + op: "Mul" + input: "gradients/bert/embeddings/Gather_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_0" + op: "Identity" + input: 
"clip_by_global_norm/mul_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Gather" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1152 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_2" + op: "Mul" + input: "gradients/bert/embeddings/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_1" + op: "Identity" + input: "clip_by_global_norm/mul_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_3" + op: "Mul" + input: "gradients/bert/embeddings/Slice_grad/Pad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_2" + op: "Identity" + input: "clip_by_global_norm/mul_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/Slice" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_4" + op: "Mul" + input: "gradients/bert/embeddings/layer_normalization/mul_3_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_3" + op: "Identity" + input: "clip_by_global_norm/mul_4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/mul_3" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_5" + op: "Mul" + input: "gradients/bert/embeddings/layer_normalization/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_4" + op: "Identity" + input: "clip_by_global_norm/mul_5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } 
+} +node { + name: "clip_by_global_norm/mul_6" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_5" + op: "Identity" + input: "clip_by_global_norm/mul_6" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_7" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_6" + op: "Identity" + input: "clip_by_global_norm/mul_7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_8" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_7" + op: "Identity" + input: "clip_by_global_norm/mul_8" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_9" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_8" + op: "Identity" + input: "clip_by_global_norm/mul_9" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_10" + op: "Mul" + input: 
"gradients/bert/encoder/layer_0/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_9" + op: "Identity" + input: "clip_by_global_norm/mul_10" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_11" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_10" + op: "Identity" + input: "clip_by_global_norm/mul_11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_12" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_11" + op: "Identity" + input: "clip_by_global_norm/mul_12" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_13" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_12" + op: "Identity" + input: "clip_by_global_norm/mul_13" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_14" + op: "Mul" + input: 
"gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_13" + op: "Identity" + input: "clip_by_global_norm/mul_14" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_15" + op: "Mul" + input: "gradients/bert/encoder/layer_0/attention/output/layer_normalization_1/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_14" + op: "Identity" + input: "clip_by_global_norm/mul_15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_16" + op: "Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_15" + op: "Identity" + input: "clip_by_global_norm/mul_16" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_17" + op: "Mul" + input: "gradients/bert/encoder/layer_0/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_16" + op: "Identity" + input: "clip_by_global_norm/mul_17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_18" + op: "Mul" + input: 
"gradients/bert/encoder/layer_0/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_17" + op: "Identity" + input: "clip_by_global_norm/mul_18" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_19" + op: "Mul" + input: "gradients/bert/encoder/layer_0/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_18" + op: "Identity" + input: "clip_by_global_norm/mul_19" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_20" + op: "Mul" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_19" + op: "Identity" + input: "clip_by_global_norm/mul_20" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_21" + op: "Mul" + input: "gradients/bert/encoder/layer_0/output/layer_normalization_2/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_20" + op: "Identity" + input: "clip_by_global_norm/mul_21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_22" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + 
key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_21" + op: "Identity" + input: "clip_by_global_norm/mul_22" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_23" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_22" + op: "Identity" + input: "clip_by_global_norm/mul_23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_24" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_23" + op: "Identity" + input: "clip_by_global_norm/mul_24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_25" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_24" + op: "Identity" + input: "clip_by_global_norm/mul_25" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_26" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + 
value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_25" + op: "Identity" + input: "clip_by_global_norm/mul_26" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_27" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_26" + op: "Identity" + input: "clip_by_global_norm/mul_27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_28" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_27" + op: "Identity" + input: "clip_by_global_norm/mul_28" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_29" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_28" + op: "Identity" + input: "clip_by_global_norm/mul_29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_30" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_29" + op: "Identity" + input: "clip_by_global_norm/mul_30" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_31" + op: "Mul" + input: "gradients/bert/encoder/layer_1/attention/output/layer_normalization_3/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_30" + op: "Identity" + input: "clip_by_global_norm/mul_31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_32" + op: "Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_31" + op: "Identity" + input: "clip_by_global_norm/mul_32" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_33" + op: "Mul" + input: "gradients/bert/encoder/layer_1/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_32" + op: "Identity" + input: "clip_by_global_norm/mul_33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_34" + op: "Mul" + input: "gradients/bert/encoder/layer_1/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/MatMul" + 
} + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_33" + op: "Identity" + input: "clip_by_global_norm/mul_34" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_35" + op: "Mul" + input: "gradients/bert/encoder/layer_1/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_34" + op: "Identity" + input: "clip_by_global_norm/mul_35" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_36" + op: "Mul" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_35" + op: "Identity" + input: "clip_by_global_norm/mul_36" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_37" + op: "Mul" + input: "gradients/bert/encoder/layer_1/output/layer_normalization_4/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_36" + op: "Identity" + input: "clip_by_global_norm/mul_37" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_38" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_37" + op: "Identity" + input: "clip_by_global_norm/mul_38" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_39" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_38" + op: "Identity" + input: "clip_by_global_norm/mul_39" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_40" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_39" + op: "Identity" + input: "clip_by_global_norm/mul_40" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_41" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_40" + op: "Identity" + input: "clip_by_global_norm/mul_41" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_42" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"clip_by_global_norm/clip_by_global_norm/_41" + op: "Identity" + input: "clip_by_global_norm/mul_42" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_43" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_42" + op: "Identity" + input: "clip_by_global_norm/mul_43" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_44" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_43" + op: "Identity" + input: "clip_by_global_norm/mul_44" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_45" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_44" + op: "Identity" + input: "clip_by_global_norm/mul_45" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_46" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"clip_by_global_norm/clip_by_global_norm/_45" + op: "Identity" + input: "clip_by_global_norm/mul_46" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_47" + op: "Mul" + input: "gradients/bert/encoder/layer_2/attention/output/layer_normalization_5/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_46" + op: "Identity" + input: "clip_by_global_norm/mul_47" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_48" + op: "Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_47" + op: "Identity" + input: "clip_by_global_norm/mul_48" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_49" + op: "Mul" + input: "gradients/bert/encoder/layer_2/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_48" + op: "Identity" + input: "clip_by_global_norm/mul_49" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_50" + op: "Mul" + input: "gradients/bert/encoder/layer_2/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_49" + 
op: "Identity" + input: "clip_by_global_norm/mul_50" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_51" + op: "Mul" + input: "gradients/bert/encoder/layer_2/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_50" + op: "Identity" + input: "clip_by_global_norm/mul_51" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_52" + op: "Mul" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_51" + op: "Identity" + input: "clip_by_global_norm/mul_52" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_53" + op: "Mul" + input: "gradients/bert/encoder/layer_2/output/layer_normalization_6/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_52" + op: "Identity" + input: "clip_by_global_norm/mul_53" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_54" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_53" + op: "Identity" + input: "clip_by_global_norm/mul_54" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_55" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_54" + op: "Identity" + input: "clip_by_global_norm/mul_55" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_56" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_55" + op: "Identity" + input: "clip_by_global_norm/mul_56" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_57" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_56" + op: "Identity" + input: "clip_by_global_norm/mul_57" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_58" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_57" + op: "Identity" + input: "clip_by_global_norm/mul_58" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_3/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_59" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_58" + op: "Identity" + input: "clip_by_global_norm/mul_59" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_60" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_59" + op: "Identity" + input: "clip_by_global_norm/mul_60" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_61" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_60" + op: "Identity" + input: "clip_by_global_norm/mul_61" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_62" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_61" + op: "Identity" + input: "clip_by_global_norm/mul_62" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_63" + op: "Mul" + input: "gradients/bert/encoder/layer_3/attention/output/layer_normalization_7/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_62" + op: "Identity" + input: "clip_by_global_norm/mul_63" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_64" + op: "Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_63" + op: "Identity" + input: "clip_by_global_norm/mul_64" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_65" + op: "Mul" + input: "gradients/bert/encoder/layer_3/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_64" + op: "Identity" + input: "clip_by_global_norm/mul_65" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_66" + op: "Mul" + input: "gradients/bert/encoder/layer_3/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_65" + op: "Identity" + input: "clip_by_global_norm/mul_66" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/MatMul" + 
} + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_67" + op: "Mul" + input: "gradients/bert/encoder/layer_3/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_66" + op: "Identity" + input: "clip_by_global_norm/mul_67" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_68" + op: "Mul" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_67" + op: "Identity" + input: "clip_by_global_norm/mul_68" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_69" + op: "Mul" + input: "gradients/bert/encoder/layer_3/output/layer_normalization_8/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_68" + op: "Identity" + input: "clip_by_global_norm/mul_69" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_70" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_69" + op: "Identity" + input: "clip_by_global_norm/mul_70" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim 
{ + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_71" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_70" + op: "Identity" + input: "clip_by_global_norm/mul_71" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_72" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_71" + op: "Identity" + input: "clip_by_global_norm/mul_72" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_73" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_72" + op: "Identity" + input: "clip_by_global_norm/mul_73" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_74" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_73" + op: "Identity" + input: "clip_by_global_norm/mul_74" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"clip_by_global_norm/mul_75" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_74" + op: "Identity" + input: "clip_by_global_norm/mul_75" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_76" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_75" + op: "Identity" + input: "clip_by_global_norm/mul_76" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_77" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_76" + op: "Identity" + input: "clip_by_global_norm/mul_77" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_78" + op: "Mul" + input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_77" + op: "Identity" + input: "clip_by_global_norm/mul_78" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_79" + op: "Mul" + 
input: "gradients/bert/encoder/layer_4/attention/output/layer_normalization_9/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_78" + op: "Identity" + input: "clip_by_global_norm/mul_79" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_80" + op: "Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_79" + op: "Identity" + input: "clip_by_global_norm/mul_80" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_81" + op: "Mul" + input: "gradients/bert/encoder/layer_4/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_80" + op: "Identity" + input: "clip_by_global_norm/mul_81" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_82" + op: "Mul" + input: "gradients/bert/encoder/layer_4/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_81" + op: "Identity" + input: "clip_by_global_norm/mul_82" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_83" + op: "Mul" + input: 
"gradients/bert/encoder/layer_4/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_82" + op: "Identity" + input: "clip_by_global_norm/mul_83" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_84" + op: "Mul" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_83" + op: "Identity" + input: "clip_by_global_norm/mul_84" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_85" + op: "Mul" + input: "gradients/bert/encoder/layer_4/output/layer_normalization_10/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_84" + op: "Identity" + input: "clip_by_global_norm/mul_85" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_86" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_85" + op: "Identity" + input: "clip_by_global_norm/mul_86" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_87" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: 
"clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_86" + op: "Identity" + input: "clip_by_global_norm/mul_87" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_88" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_87" + op: "Identity" + input: "clip_by_global_norm/mul_88" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_89" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_88" + op: "Identity" + input: "clip_by_global_norm/mul_89" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_90" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_89" + op: "Identity" + input: "clip_by_global_norm/mul_90" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_91" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_90" + op: "Identity" + input: "clip_by_global_norm/mul_91" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_92" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_91" + op: "Identity" + input: "clip_by_global_norm/mul_92" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_93" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_92" + op: "Identity" + input: "clip_by_global_norm/mul_93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_94" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_93" + op: "Identity" + input: "clip_by_global_norm/mul_94" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_95" + op: "Mul" + input: "gradients/bert/encoder/layer_5/attention/output/layer_normalization_11/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + 
key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_94" + op: "Identity" + input: "clip_by_global_norm/mul_95" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_96" + op: "Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_95" + op: "Identity" + input: "clip_by_global_norm/mul_96" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_97" + op: "Mul" + input: "gradients/bert/encoder/layer_5/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_96" + op: "Identity" + input: "clip_by_global_norm/mul_97" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_98" + op: "Mul" + input: "gradients/bert/encoder/layer_5/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_97" + op: "Identity" + input: "clip_by_global_norm/mul_98" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_99" + op: "Mul" + input: "gradients/bert/encoder/layer_5/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_5/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_98" + op: "Identity" + input: "clip_by_global_norm/mul_99" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_100" + op: "Mul" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_99" + op: "Identity" + input: "clip_by_global_norm/mul_100" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_101" + op: "Mul" + input: "gradients/bert/encoder/layer_5/output/layer_normalization_12/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_100" + op: "Identity" + input: "clip_by_global_norm/mul_101" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_102" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_101" + op: "Identity" + input: "clip_by_global_norm/mul_102" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_103" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/BiasAdd" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_102" + op: "Identity" + input: "clip_by_global_norm/mul_103" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_104" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_103" + op: "Identity" + input: "clip_by_global_norm/mul_104" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_105" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_104" + op: "Identity" + input: "clip_by_global_norm/mul_105" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_106" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_105" + op: "Identity" + input: "clip_by_global_norm/mul_106" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_107" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { 
+ size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_106" + op: "Identity" + input: "clip_by_global_norm/mul_107" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_108" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_107" + op: "Identity" + input: "clip_by_global_norm/mul_108" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_109" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_108" + op: "Identity" + input: "clip_by_global_norm/mul_109" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_110" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_109" + op: "Identity" + input: "clip_by_global_norm/mul_110" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_111" + op: "Mul" + input: "gradients/bert/encoder/layer_6/attention/output/layer_normalization_13/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_110" + op: "Identity" + input: "clip_by_global_norm/mul_111" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_112" + op: "Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_111" + op: "Identity" + input: "clip_by_global_norm/mul_112" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_113" + op: "Mul" + input: "gradients/bert/encoder/layer_6/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_112" + op: "Identity" + input: "clip_by_global_norm/mul_113" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_114" + op: "Mul" + input: "gradients/bert/encoder/layer_6/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_113" + op: "Identity" + input: "clip_by_global_norm/mul_114" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_115" + op: "Mul" + input: "gradients/bert/encoder/layer_6/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"clip_by_global_norm/clip_by_global_norm/_114" + op: "Identity" + input: "clip_by_global_norm/mul_115" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_116" + op: "Mul" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_115" + op: "Identity" + input: "clip_by_global_norm/mul_116" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_117" + op: "Mul" + input: "gradients/bert/encoder/layer_6/output/layer_normalization_14/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_116" + op: "Identity" + input: "clip_by_global_norm/mul_117" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_118" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_117" + op: "Identity" + input: "clip_by_global_norm/mul_118" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_119" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_118" + op: "Identity" + 
input: "clip_by_global_norm/mul_119" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_120" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_119" + op: "Identity" + input: "clip_by_global_norm/mul_120" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_121" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_120" + op: "Identity" + input: "clip_by_global_norm/mul_121" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_122" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_121" + op: "Identity" + input: "clip_by_global_norm/mul_122" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_123" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_122" + op: "Identity" + input: "clip_by_global_norm/mul_123" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_124" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_123" + op: "Identity" + input: "clip_by_global_norm/mul_124" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_125" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_124" + op: "Identity" + input: "clip_by_global_norm/mul_125" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_126" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_125" + op: "Identity" + input: "clip_by_global_norm/mul_126" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_127" + op: "Mul" + input: "gradients/bert/encoder/layer_7/attention/output/layer_normalization_15/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_126" + op: "Identity" + input: "clip_by_global_norm/mul_127" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_128" + op: "Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_127" + op: "Identity" + input: "clip_by_global_norm/mul_128" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_129" + op: "Mul" + input: "gradients/bert/encoder/layer_7/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_128" + op: "Identity" + input: "clip_by_global_norm/mul_129" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_130" + op: "Mul" + input: "gradients/bert/encoder/layer_7/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_129" + op: "Identity" + input: "clip_by_global_norm/mul_130" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_131" + op: "Mul" + input: "gradients/bert/encoder/layer_7/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_130" + op: "Identity" + input: "clip_by_global_norm/mul_131" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { 
+ s: "loc:@bert/encoder/layer_7/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_132" + op: "Mul" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_131" + op: "Identity" + input: "clip_by_global_norm/mul_132" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_133" + op: "Mul" + input: "gradients/bert/encoder/layer_7/output/layer_normalization_16/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_132" + op: "Identity" + input: "clip_by_global_norm/mul_133" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_134" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_133" + op: "Identity" + input: "clip_by_global_norm/mul_134" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_135" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_134" + op: "Identity" + input: "clip_by_global_norm/mul_135" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/BiasAdd" + } + 
} + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_136" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_135" + op: "Identity" + input: "clip_by_global_norm/mul_136" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_137" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_136" + op: "Identity" + input: "clip_by_global_norm/mul_137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_138" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_137" + op: "Identity" + input: "clip_by_global_norm/mul_138" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_139" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_138" + op: "Identity" + input: "clip_by_global_norm/mul_139" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { 
+ shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_140" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_139" + op: "Identity" + input: "clip_by_global_norm/mul_140" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_141" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_140" + op: "Identity" + input: "clip_by_global_norm/mul_141" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_142" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_141" + op: "Identity" + input: "clip_by_global_norm/mul_142" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_143" + op: "Mul" + input: "gradients/bert/encoder/layer_8/attention/output/layer_normalization_17/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_142" + op: "Identity" + input: "clip_by_global_norm/mul_143" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/add" + } + } + } + attr { + key: "_output_shapes" + value 
{ + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_144" + op: "Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_143" + op: "Identity" + input: "clip_by_global_norm/mul_144" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_145" + op: "Mul" + input: "gradients/bert/encoder/layer_8/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_144" + op: "Identity" + input: "clip_by_global_norm/mul_145" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_146" + op: "Mul" + input: "gradients/bert/encoder/layer_8/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_145" + op: "Identity" + input: "clip_by_global_norm/mul_146" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_147" + op: "Mul" + input: "gradients/bert/encoder/layer_8/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_146" + op: "Identity" + input: "clip_by_global_norm/mul_147" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"clip_by_global_norm/mul_148" + op: "Mul" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_147" + op: "Identity" + input: "clip_by_global_norm/mul_148" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_149" + op: "Mul" + input: "gradients/bert/encoder/layer_8/output/layer_normalization_18/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_148" + op: "Identity" + input: "clip_by_global_norm/mul_149" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_150" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_149" + op: "Identity" + input: "clip_by_global_norm/mul_150" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_151" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_150" + op: "Identity" + input: "clip_by_global_norm/mul_151" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_152" + op: "Mul" + input: 
"gradients/bert/encoder/layer_9/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_151" + op: "Identity" + input: "clip_by_global_norm/mul_152" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_153" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_152" + op: "Identity" + input: "clip_by_global_norm/mul_153" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_154" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_153" + op: "Identity" + input: "clip_by_global_norm/mul_154" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_155" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_154" + op: "Identity" + input: "clip_by_global_norm/mul_155" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_156" + op: "Mul" + input: 
"gradients/bert/encoder/layer_9/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_155" + op: "Identity" + input: "clip_by_global_norm/mul_156" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_157" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_156" + op: "Identity" + input: "clip_by_global_norm/mul_157" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_158" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_157" + op: "Identity" + input: "clip_by_global_norm/mul_158" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_159" + op: "Mul" + input: "gradients/bert/encoder/layer_9/attention/output/layer_normalization_19/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_158" + op: "Identity" + input: "clip_by_global_norm/mul_159" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_160" + op: 
"Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_159" + op: "Identity" + input: "clip_by_global_norm/mul_160" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_161" + op: "Mul" + input: "gradients/bert/encoder/layer_9/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_160" + op: "Identity" + input: "clip_by_global_norm/mul_161" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_162" + op: "Mul" + input: "gradients/bert/encoder/layer_9/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_161" + op: "Identity" + input: "clip_by_global_norm/mul_162" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_163" + op: "Mul" + input: "gradients/bert/encoder/layer_9/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_162" + op: "Identity" + input: "clip_by_global_norm/mul_163" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_164" + op: "Mul" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/mul_2_grad/Reshape" + 
input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_163" + op: "Identity" + input: "clip_by_global_norm/mul_164" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_165" + op: "Mul" + input: "gradients/bert/encoder/layer_9/output/layer_normalization_20/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_164" + op: "Identity" + input: "clip_by_global_norm/mul_165" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_166" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_165" + op: "Identity" + input: "clip_by_global_norm/mul_166" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_167" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_166" + op: "Identity" + input: "clip_by_global_norm/mul_167" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_168" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { 
+ type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_167" + op: "Identity" + input: "clip_by_global_norm/mul_168" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_169" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_168" + op: "Identity" + input: "clip_by_global_norm/mul_169" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_170" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_169" + op: "Identity" + input: "clip_by_global_norm/mul_170" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_171" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_170" + op: "Identity" + input: "clip_by_global_norm/mul_171" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_172" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_171" + op: "Identity" + input: "clip_by_global_norm/mul_172" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_173" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_172" + op: "Identity" + input: "clip_by_global_norm/mul_173" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_174" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_173" + op: "Identity" + input: "clip_by_global_norm/mul_174" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_175" + op: "Mul" + input: "gradients/bert/encoder/layer_10/attention/output/layer_normalization_21/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_174" + op: "Identity" + input: "clip_by_global_norm/mul_175" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_176" + op: "Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } 
+ } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_175" + op: "Identity" + input: "clip_by_global_norm/mul_176" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_177" + op: "Mul" + input: "gradients/bert/encoder/layer_10/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_176" + op: "Identity" + input: "clip_by_global_norm/mul_177" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_178" + op: "Mul" + input: "gradients/bert/encoder/layer_10/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_177" + op: "Identity" + input: "clip_by_global_norm/mul_178" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_179" + op: "Mul" + input: "gradients/bert/encoder/layer_10/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_178" + op: "Identity" + input: "clip_by_global_norm/mul_179" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_180" + op: "Mul" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_10/output/layer_normalization_22/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_179" + op: "Identity" + input: "clip_by_global_norm/mul_180" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_181" + op: "Mul" + input: "gradients/bert/encoder/layer_10/output/layer_normalization_22/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_180" + op: "Identity" + input: "clip_by_global_norm/mul_181" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_182" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/query/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_181" + op: "Identity" + input: "clip_by_global_norm/mul_182" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_183" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/query/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_182" + op: "Identity" + input: "clip_by_global_norm/mul_183" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_184" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/key/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/MatMul" + 
} + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_183" + op: "Identity" + input: "clip_by_global_norm/mul_184" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_185" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/key/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_184" + op: "Identity" + input: "clip_by_global_norm/mul_185" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_186" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/value/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_185" + op: "Identity" + input: "clip_by_global_norm/mul_186" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_187" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/self/value/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_186" + op: "Identity" + input: "clip_by_global_norm/mul_187" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_188" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/MatMul" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_187" + op: "Identity" + input: "clip_by_global_norm/mul_188" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_189" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_188" + op: "Identity" + input: "clip_by_global_norm/mul_189" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_190" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_189" + op: "Identity" + input: "clip_by_global_norm/mul_190" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_191" + op: "Mul" + input: "gradients/bert/encoder/layer_11/attention/output/layer_normalization_23/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_190" + op: "Identity" + input: "clip_by_global_norm/mul_191" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_192" + op: "Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/MatMul" + } + } + } 
+ attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_191" + op: "Identity" + input: "clip_by_global_norm/mul_192" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_193" + op: "Mul" + input: "gradients/bert/encoder/layer_11/intermediate/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_192" + op: "Identity" + input: "clip_by_global_norm/mul_193" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_194" + op: "Mul" + input: "gradients/bert/encoder/layer_11/output/dense/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_193" + op: "Identity" + input: "clip_by_global_norm/mul_194" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_195" + op: "Mul" + input: "gradients/bert/encoder/layer_11/output/dense/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_194" + op: "Identity" + input: "clip_by_global_norm/mul_195" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_196" + op: "Mul" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/mul_2_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { 
+ size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_195" + op: "Identity" + input: "clip_by_global_norm/mul_196" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/mul_2" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_197" + op: "Mul" + input: "gradients/bert/encoder/layer_11/output/layer_normalization_24/add_grad/Reshape" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_196" + op: "Identity" + input: "clip_by_global_norm/mul_197" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/add" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_198" + op: "Mul" + input: "gradients/MatMul_grad/MatMul_1" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_199" + op: "Identity" + input: "clip_by_global_norm/mul_198" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@MatMul" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/mul_199" + op: "Mul" + input: "gradients/BiasAdd_grad/BiasAddGrad" + input: "clip_by_global_norm/add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "clip_by_global_norm/clip_by_global_norm/_200" + op: "Identity" + input: "clip_by_global_norm/mul_199" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@BiasAdd" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: ":w\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings/adam_m" + } + } + } + attr { 
+ key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/embeddings/word_embeddings/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/embeddings/word_embeddings/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/word_embeddings/adam_m" + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/word_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/word_embeddings/adam_m" + input: "bert/embeddings/word_embeddings/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: ":w\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: 
"bert/embeddings/word_embeddings/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/embeddings/word_embeddings/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/embeddings/word_embeddings/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/word_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/word_embeddings/adam_v" + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/word_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/word_embeddings/adam_v" + input: "bert/embeddings/word_embeddings/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/word_embeddings/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_2/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_2/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_2" + op: "Mul" + input: "Mul_2/x" + input: "Mul_2/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_3/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_3/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "Mul_3/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "Mul_3/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "Mul_3/strided_slice" + op: "StridedSlice" + input: "gradients/bert/embeddings/Gather_grad/Const" + input: "Mul_3/strided_slice/stack" + input: "Mul_3/strided_slice/stack_1" + input: "Mul_3/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "Mul_3/y" + op: "UnsortedSegmentSum" + input: "clip_by_global_norm/clip_by_global_norm/_0" + input: "gradients/bert/embeddings/Gather_grad/Reshape_1" + input: "Mul_3/strided_slice" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tindices" + value { + type: DT_INT32 + } + } + attr { + key: "Tnumsegments" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_3" + op: "Mul" + input: "Mul_3/x" + input: "Mul_3/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_1" + op: "AddV2" + input: "Mul_2" + input: "Mul_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_4/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_4/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_4" + op: "Mul" + input: "Mul_4/x" + input: "Mul_4/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + 
name: "Square/strided_slice/stack" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "Square/strided_slice/stack_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "Square/strided_slice/stack_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "Square/strided_slice" + op: "StridedSlice" + input: "gradients/bert/embeddings/Gather_grad/Const" + input: "Square/strided_slice/stack" + input: "Square/strided_slice/stack_1" + input: "Square/strided_slice/stack_2" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 1 + } + } +} +node { + name: "Square/x" + op: "UnsortedSegmentSum" + input: "clip_by_global_norm/clip_by_global_norm/_0" + input: "gradients/bert/embeddings/Gather_grad/Reshape_1" + input: "Square/strided_slice" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tindices" + value { + type: DT_INT32 + } + } + attr { + key: "Tnumsegments" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square" + op: "Square" + input: "Square/x" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_5/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_5" + op: "Mul" + input: "Mul_5/x" + input: "Square" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_2" + op: "AddV2" + input: "Mul_4" + input: "Mul_5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt" + op: "Sqrt" + input: "add_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "add_3/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_3" + op: "AddV2" + input: "Sqrt" + input: "add_3/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_1" + op: "RealDiv" + input: "add_1" + input: "add_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_6/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_6" + op: "Mul" + input: "mul_6/x" + input: "ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_4" + op: "AddV2" + input: "truediv_1" + input: "mul_6" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_7" + op: "Mul" + input: "PolynomialDecay" + input: "add_4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_1" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub" + op: "Sub" + input: "ReadVariableOp_1" + input: "mul_7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_149" + op: "AssignVariableOp" + input: "bert/embeddings/word_embeddings" + input: "sub" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_2" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings" + input: "^AssignVariableOp_149" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_150" + op: "AssignVariableOp" + input: 
"bert/embeddings/word_embeddings/adam_m" + input: "add_1" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_3" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings/adam_m" + input: "^AssignVariableOp_150" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_151" + op: "AssignVariableOp" + input: "bert/embeddings/word_embeddings/adam_v" + input: "add_2" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_4" + op: "ReadVariableOp" + input: "bert/embeddings/word_embeddings/adam_v" + input: "^AssignVariableOp_151" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 30522 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\002\000\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/embeddings/token_type_embeddings/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/embeddings/token_type_embeddings/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/token_type_embeddings/adam_m" + } + 
} +} +node { + name: "bert/embeddings/token_type_embeddings/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/token_type_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_m" + input: "bert/embeddings/token_type_embeddings/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\002\000\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/embeddings/token_type_embeddings/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/embeddings/token_type_embeddings/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/token_type_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/token_type_embeddings/adam_v" + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/token_type_embeddings/adam_v" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_v" + input: "bert/embeddings/token_type_embeddings/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/token_type_embeddings/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_8/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_8/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_8" + op: "Mul" + input: "Mul_8/x" + input: "Mul_8/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_9/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_9" + op: "Mul" + input: "Mul_9/x" + input: "clip_by_global_norm/clip_by_global_norm/_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_5" + op: "AddV2" + input: "Mul_8" + input: "Mul_9" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_10/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_10/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_10" + op: "Mul" + input: "Mul_10/x" + input: "Mul_10/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_1" + op: "Square" + input: 
"clip_by_global_norm/clip_by_global_norm/_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_11/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_11" + op: "Mul" + input: "Mul_11/x" + input: "Square_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_6" + op: "AddV2" + input: "Mul_10" + input: "Mul_11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_1" + op: "Sqrt" + input: "add_6" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_7/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_7" + op: "AddV2" + input: "Sqrt_1" + input: "add_7/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_2" + op: "RealDiv" + input: "add_5" + input: "add_7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_5" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_12/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_12" + op: "Mul" + input: "mul_12/x" + input: "ReadVariableOp_5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_8" + op: "AddV2" + input: "truediv_2" + input: "mul_12" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_13" + op: "Mul" + input: "PolynomialDecay" + input: "add_8" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_6" + op: 
"ReadVariableOp" + input: "bert/embeddings/token_type_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_1" + op: "Sub" + input: "ReadVariableOp_6" + input: "mul_13" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_152" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings" + input: "sub_1" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_7" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings" + input: "^AssignVariableOp_152" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_153" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_m" + input: "add_5" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_8" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_m" + input: "^AssignVariableOp_153" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_154" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_v" + input: "add_6" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_9" + op: "ReadVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_v" + input: "^AssignVariableOp_154" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\002\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: 
"bert/embeddings/position_embeddings/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/embeddings/position_embeddings/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/embeddings/position_embeddings/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/position_embeddings/adam_m" + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/position_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings/adam_m" + input: "bert/embeddings/position_embeddings/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\002\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/embeddings/position_embeddings/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/embeddings/position_embeddings/adam_v/Initializer/zeros/Const" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/position_embeddings/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/position_embeddings/adam_v" + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/position_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings/adam_v" + input: "bert/embeddings/position_embeddings/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/position_embeddings/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_14/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_14/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_14" + op: "Mul" + input: "Mul_14/x" + input: "Mul_14/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_15/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_15" + op: "Mul" + input: "Mul_15/x" + input: "clip_by_global_norm/clip_by_global_norm/_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_9" + 
op: "AddV2" + input: "Mul_14" + input: "Mul_15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_16/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_16/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_16" + op: "Mul" + input: "Mul_16/x" + input: "Mul_16/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_2" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_17/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_17" + op: "Mul" + input: "Mul_17/x" + input: "Square_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_10" + op: "AddV2" + input: "Mul_16" + input: "Mul_17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_2" + op: "Sqrt" + input: "add_10" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_11/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_11" + op: "AddV2" + input: "Sqrt_2" + input: "add_11/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_3" + op: "RealDiv" + input: "add_9" + input: "add_11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_10" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_18/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_18" + op: "Mul" + input: "mul_18/x" + input: "ReadVariableOp_10" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_12" + op: "AddV2" + input: "truediv_3" + input: "mul_18" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_19" + op: "Mul" + input: "PolynomialDecay" + input: "add_12" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_11" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_2" + op: "Sub" + input: "ReadVariableOp_11" + input: "mul_19" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_155" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings" + input: "sub_2" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_12" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings" + input: "^AssignVariableOp_155" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_156" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings/adam_m" + input: "add_9" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_13" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings/adam_m" + input: "^AssignVariableOp_156" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_157" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings/adam_v" + input: "add_10" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_14" + op: "ReadVariableOp" + input: "bert/embeddings/position_embeddings/adam_v" + input: "^AssignVariableOp_157" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 512 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/layer_normalization/gamma/adam_m" + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_m" + input: "bert/embeddings/layer_normalization/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/layer_normalization/gamma/adam_v" + } + } +} +node { + 
name: "bert/embeddings/layer_normalization/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v" + input: "bert/embeddings/layer_normalization/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/layer_normalization/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_20/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_20/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_20" + op: "Mul" + input: "Mul_20/x" + input: "Mul_20/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_21/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_21" + op: "Mul" + input: "Mul_21/x" + input: "clip_by_global_norm/clip_by_global_norm/_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_13" + op: "AddV2" + input: "Mul_20" + input: "Mul_21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_22/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_22/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_22" + op: "Mul" + input: "Mul_22/x" + input: "Mul_22/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + 
name: "Square_3" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_23/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_23" + op: "Mul" + input: "Mul_23/x" + input: "Square_3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_14" + op: "AddV2" + input: "Mul_22" + input: "Mul_23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_3" + op: "Sqrt" + input: "add_14" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_15/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_15" + op: "AddV2" + input: "Sqrt_3" + input: "add_15/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_4" + op: "RealDiv" + input: "add_13" + input: "add_15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_24" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_15" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_3" + op: "Sub" + input: "ReadVariableOp_15" + input: "mul_24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_158" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/gamma" + input: "sub_3" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_16" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma" + input: "^AssignVariableOp_158" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_159" + op: "AssignVariableOp" + input: 
"bert/embeddings/layer_normalization/gamma/adam_m" + input: "add_13" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_17" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_m" + input: "^AssignVariableOp_159" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_160" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v" + input: "add_14" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_18" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v" + input: "^AssignVariableOp_160" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/layer_normalization/beta/adam_m" + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_m" + input: "bert/embeddings/layer_normalization/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/embeddings/layer_normalization/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/embeddings/layer_normalization/beta/adam_v" + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_v" + input: "bert/embeddings/layer_normalization/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/embeddings/layer_normalization/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_25/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_25/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_25" + op: "Mul" + input: "Mul_25/x" + input: "Mul_25/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_26/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_26" + op: "Mul" + input: "Mul_26/x" + input: "clip_by_global_norm/clip_by_global_norm/_4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_16" + op: "AddV2" + input: 
"Mul_25" + input: "Mul_26" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_27/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_27/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_27" + op: "Mul" + input: "Mul_27/x" + input: "Mul_27/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_4" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_28/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_28" + op: "Mul" + input: "Mul_28/x" + input: "Square_4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_17" + op: "AddV2" + input: "Mul_27" + input: "Mul_28" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_4" + op: "Sqrt" + input: "add_17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_18/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_18" + op: "AddV2" + input: "Sqrt_4" + input: "add_18/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_5" + op: "RealDiv" + input: "add_16" + input: "add_18" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_29" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_19" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_4" + op: "Sub" + input: "ReadVariableOp_19" + input: "mul_29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_161" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/beta" + input: "sub_4" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_20" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta" + input: "^AssignVariableOp_161" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_162" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_m" + input: "add_16" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_21" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_m" + input: "^AssignVariableOp_162" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_163" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_v" + input: "add_17" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_22" + op: "ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_v" + input: "^AssignVariableOp_163" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_0/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_0/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_30/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_30/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_30" + op: "Mul" + input: "Mul_30/x" + input: "Mul_30/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_31/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_31" + op: "Mul" + input: "Mul_31/x" + input: 
"clip_by_global_norm/clip_by_global_norm/_5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_19" + op: "AddV2" + input: "Mul_30" + input: "Mul_31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_32/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_32/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_32" + op: "Mul" + input: "Mul_32/x" + input: "Mul_32/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_5" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_33/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_33" + op: "Mul" + input: "Mul_33/x" + input: "Square_5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_20" + op: "AddV2" + input: "Mul_32" + input: "Mul_33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_5" + op: "Sqrt" + input: "add_20" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_21/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_21" + op: "AddV2" + input: "Sqrt_5" + input: "add_21/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_6" + op: "RealDiv" + input: "add_19" + input: "add_21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 
+ } + } + } + } + } +} +node { + name: "ReadVariableOp_23" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_34/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_34" + op: "Mul" + input: "mul_34/x" + input: "ReadVariableOp_23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_22" + op: "AddV2" + input: "truediv_6" + input: "mul_34" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_35" + op: "Mul" + input: "PolynomialDecay" + input: "add_22" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_24" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_5" + op: "Sub" + input: "ReadVariableOp_24" + input: "mul_35" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_164" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + input: "sub_5" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_25" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + input: "^AssignVariableOp_164" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_165" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + input: "add_19" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_26" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_165" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_166" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + input: "add_20" + attr { + 
key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_27" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_166" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/query/bias/adam_v" + } + } + } + attr { 
+ key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_36/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_36/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_36" + op: "Mul" + input: "Mul_36/x" + input: "Mul_36/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_37/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_37" + op: "Mul" + input: "Mul_37/x" + input: "clip_by_global_norm/clip_by_global_norm/_6" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_23" + op: "AddV2" + input: "Mul_36" + input: "Mul_37" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_38/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_38/ReadVariableOp" + op: "ReadVariableOp" 
+ input: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_38" + op: "Mul" + input: "Mul_38/x" + input: "Mul_38/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_6" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_6" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_39/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_39" + op: "Mul" + input: "Mul_39/x" + input: "Square_6" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_24" + op: "AddV2" + input: "Mul_38" + input: "Mul_39" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_6" + op: "Sqrt" + input: "add_24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_25/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_25" + op: "AddV2" + input: "Sqrt_6" + input: "add_25/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_7" + op: "RealDiv" + input: "add_23" + input: "add_25" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_40" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_28" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_6" + op: "Sub" + input: "ReadVariableOp_28" + input: "mul_40" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_167" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + input: "sub_6" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_29" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + input: "^AssignVariableOp_167" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_168" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + input: "add_23" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_30" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_168" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_169" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + input: "add_24" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_31" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_169" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr 
{ + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_0/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_41/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_41/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_41" + op: "Mul" + input: "Mul_41/x" + input: "Mul_41/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_42/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_42" + op: "Mul" + input: "Mul_42/x" + input: "clip_by_global_norm/clip_by_global_norm/_7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_26" + op: "AddV2" + input: "Mul_41" + input: "Mul_42" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_43/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_43/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_43" + op: "Mul" + input: "Mul_43/x" + input: "Mul_43/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_7" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_44/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_44" + op: "Mul" + input: "Mul_44/x" + input: "Square_7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_27" + op: "AddV2" + input: "Mul_43" + input: "Mul_44" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_7" + op: "Sqrt" + input: "add_27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_28/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_28" + op: "AddV2" + input: "Sqrt_7" + input: "add_28/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_8" + op: "RealDiv" + input: "add_26" + input: "add_28" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_32" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_45/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + 
} +} +node { + name: "mul_45" + op: "Mul" + input: "mul_45/x" + input: "ReadVariableOp_32" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_29" + op: "AddV2" + input: "truediv_8" + input: "mul_45" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_46" + op: "Mul" + input: "PolynomialDecay" + input: "add_29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_33" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_7" + op: "Sub" + input: "ReadVariableOp_33" + input: "mul_46" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_170" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + input: "sub_7" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_34" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + input: "^AssignVariableOp_170" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_171" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + input: "add_26" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_35" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_171" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_172" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + input: "add_27" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_36" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_172" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + 
key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + 
shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_47/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_47/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_47" + op: "Mul" + input: "Mul_47/x" + input: "Mul_47/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_48/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_48" + op: "Mul" + input: "Mul_48/x" + input: "clip_by_global_norm/clip_by_global_norm/_8" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_30" + op: "AddV2" + input: "Mul_47" + input: "Mul_48" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_49/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_49/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_49" + op: "Mul" + input: "Mul_49/x" + input: "Mul_49/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_8" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_8" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "Mul_50/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_50" + op: "Mul" + input: "Mul_50/x" + input: "Square_8" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_31" + op: "AddV2" + input: "Mul_49" + input: "Mul_50" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_8" + op: "Sqrt" + input: "add_31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_32/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_32" + op: "AddV2" + input: "Sqrt_8" + input: "add_32/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_9" + op: "RealDiv" + input: "add_30" + input: "add_32" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_51" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_9" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_37" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_8" + op: "Sub" + input: "ReadVariableOp_37" + input: "mul_51" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_173" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + input: "sub_8" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_38" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + input: "^AssignVariableOp_173" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_174" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + input: "add_30" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_39" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_174" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_175" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + input: "add_31" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_40" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_175" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + } + } +} +node { + name: 
"bert/encoder/layer_0/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + } + } +} +node { + name: 
"bert/encoder/layer_0/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_52/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_52/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_52" + op: "Mul" + input: "Mul_52/x" + input: "Mul_52/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_53/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_53" + op: "Mul" + input: "Mul_53/x" + input: "clip_by_global_norm/clip_by_global_norm/_9" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_33" + op: "AddV2" + input: "Mul_52" + input: "Mul_53" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_54/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_54/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_54" + op: "Mul" + input: "Mul_54/x" + 
input: "Mul_54/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_9" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_9" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_55/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_55" + op: "Mul" + input: "Mul_55/x" + input: "Square_9" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_34" + op: "AddV2" + input: "Mul_54" + input: "Mul_55" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_9" + op: "Sqrt" + input: "add_34" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_35/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_35" + op: "AddV2" + input: "Sqrt_9" + input: "add_35/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_10" + op: "RealDiv" + input: "add_33" + input: "add_35" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_41" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_56/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_56" + op: "Mul" + input: "mul_56/x" + input: "ReadVariableOp_41" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_36" + op: "AddV2" + input: "truediv_10" + input: "mul_56" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } 
+} +node { + name: "mul_57" + op: "Mul" + input: "PolynomialDecay" + input: "add_36" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_42" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_9" + op: "Sub" + input: "ReadVariableOp_42" + input: "mul_57" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_176" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + input: "sub_9" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_43" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + input: "^AssignVariableOp_176" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_177" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + input: "add_33" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_44" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_177" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_178" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + input: "add_34" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_45" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_178" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + 
key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } 
+} +node { + name: "bert/encoder/layer_0/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_58/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_58/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_58" + op: "Mul" + input: "Mul_58/x" + input: "Mul_58/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_59/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_59" + op: "Mul" + input: "Mul_59/x" + input: "clip_by_global_norm/clip_by_global_norm/_10" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_37" + op: "AddV2" + input: "Mul_58" + input: "Mul_59" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_60/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_60/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_60" + op: "Mul" + input: "Mul_60/x" + input: "Mul_60/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_10" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_10" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_61/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_61" + op: "Mul" + input: "Mul_61/x" + input: "Square_10" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_38" + op: "AddV2" + input: "Mul_60" + input: "Mul_61" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_10" + op: "Sqrt" + input: "add_38" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_39/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_39" + op: "AddV2" + input: "Sqrt_10" + input: "add_39/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_11" + op: "RealDiv" + input: "add_37" + input: "add_39" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_62" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_46" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_10" + op: "Sub" + input: "ReadVariableOp_46" + input: "mul_62" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_179" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + input: "sub_10" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_47" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + input: "^AssignVariableOp_179" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_180" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + input: "add_37" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_48" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_180" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_181" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_0/attention/self/value/bias/adam_v" + input: "add_38" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_49" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_181" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + input: 
"bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_63/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_63/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_63" + op: "Mul" + input: "Mul_63/x" + input: "Mul_63/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_64/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_64" + op: "Mul" + input: "Mul_64/x" + input: "clip_by_global_norm/clip_by_global_norm/_11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_40" + op: "AddV2" + input: "Mul_63" + input: "Mul_64" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_65/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_65/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_65" + op: "Mul" + input: "Mul_65/x" + input: "Mul_65/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_11" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_66/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_66" + op: "Mul" + input: "Mul_66/x" + input: "Square_11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_41" + op: "AddV2" + input: "Mul_65" + input: "Mul_66" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_11" + op: "Sqrt" + input: "add_41" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_42/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_42" + op: "AddV2" + input: "Sqrt_11" + input: "add_42/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_12" + op: "RealDiv" + input: "add_40" + input: "add_42" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_50" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_67/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_67" + op: "Mul" + input: "mul_67/x" + input: "ReadVariableOp_50" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_43" + op: "AddV2" + input: "truediv_12" + input: "mul_67" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_68" + op: "Mul" + input: "PolynomialDecay" + input: "add_43" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_51" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_0/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_11" + op: "Sub" + input: "ReadVariableOp_51" + input: "mul_68" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_182" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + input: "sub_11" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_52" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + input: "^AssignVariableOp_182" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_183" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + input: "add_40" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_53" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_183" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_184" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + input: "add_41" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_54" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_184" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr 
{ + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + 
} + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_69/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_69/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_69" + op: "Mul" + input: "Mul_69/x" + input: "Mul_69/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_70/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_70" + op: "Mul" + input: "Mul_70/x" + input: "clip_by_global_norm/clip_by_global_norm/_12" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_44" + op: "AddV2" + input: "Mul_69" + input: "Mul_70" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_71/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_71/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_71" + op: "Mul" + input: "Mul_71/x" + input: "Mul_71/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_12" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_12" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_72/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_72" + op: "Mul" + input: "Mul_72/x" + input: "Square_12" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_45" + op: "AddV2" + input: "Mul_71" + input: "Mul_72" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_12" + op: "Sqrt" + input: "add_45" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_46/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_46" + op: "AddV2" + input: "Sqrt_12" + input: "add_46/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_13" + op: "RealDiv" + input: "add_44" + input: "add_46" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_73" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_13" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_55" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_12" + op: "Sub" + input: "ReadVariableOp_55" + input: "mul_73" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_185" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + input: "sub_12" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_56" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + input: "^AssignVariableOp_185" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_186" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + input: "add_44" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_57" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_186" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_187" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + input: "add_45" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} 
+node { + name: "ReadVariableOp_58" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_187" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_74/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_74/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_74" + op: "Mul" + input: "Mul_74/x" + input: "Mul_74/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_75/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_75" + op: "Mul" + input: "Mul_75/x" + input: "clip_by_global_norm/clip_by_global_norm/_13" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_47" + op: "AddV2" + input: "Mul_74" + input: "Mul_75" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_76/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + 
shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_76/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_76" + op: "Mul" + input: "Mul_76/x" + input: "Mul_76/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_13" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_13" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_77/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_77" + op: "Mul" + input: "Mul_77/x" + input: "Square_13" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_48" + op: "AddV2" + input: "Mul_76" + input: "Mul_77" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_13" + op: "Sqrt" + input: "add_48" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_49/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_49" + op: "AddV2" + input: "Sqrt_13" + input: "add_49/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_14" + op: "RealDiv" + input: "add_47" + input: "add_49" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_78" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_14" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_59" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_13" + op: "Sub" + input: "ReadVariableOp_59" + input: "mul_78" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { 
+ list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_188" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + input: "sub_13" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_60" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + input: "^AssignVariableOp_188" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_189" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + input: "add_47" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_61" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + input: "^AssignVariableOp_189" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_190" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + input: "add_48" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_62" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + input: "^AssignVariableOp_190" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + } + } +} +node { + name: 
"bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + attr { + key: "_output_shapes" + value { + list 
{ + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_79/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_79/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_79" + op: "Mul" + input: "Mul_79/x" + input: "Mul_79/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_80/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_80" + op: "Mul" + input: "Mul_80/x" + input: "clip_by_global_norm/clip_by_global_norm/_14" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_50" + op: "AddV2" + input: "Mul_79" + input: "Mul_80" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_81/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_81/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_81" + op: "Mul" + input: "Mul_81/x" + input: "Mul_81/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_14" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_14" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_82/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_82" + op: "Mul" + input: "Mul_82/x" + input: "Square_14" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_51" + op: "AddV2" + input: "Mul_81" + input: "Mul_82" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_14" + op: "Sqrt" + input: "add_51" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_52/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_52" + op: "AddV2" + input: "Sqrt_14" + input: "add_52/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_15" + op: "RealDiv" + input: "add_50" + input: "add_52" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_83" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_63" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_14" + op: "Sub" + input: "ReadVariableOp_63" + input: "mul_83" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_191" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + input: "sub_14" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_64" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + input: "^AssignVariableOp_191" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_192" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + input: "add_50" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_65" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + input: "^AssignVariableOp_192" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_193" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + input: "add_51" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_66" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + input: "^AssignVariableOp_193" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } 
+ } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_84/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_84/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_84" + op: "Mul" + input: "Mul_84/x" + input: "Mul_84/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_85/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_85" + op: "Mul" + input: "Mul_85/x" + input: "clip_by_global_norm/clip_by_global_norm/_15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_53" + op: "AddV2" + input: "Mul_84" + input: "Mul_85" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_86/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_86/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_86" + op: "Mul" + input: "Mul_86/x" + input: "Mul_86/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_15" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_87/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_87" + op: "Mul" + input: "Mul_87/x" + input: "Square_15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_54" + op: "AddV2" + input: "Mul_86" + input: "Mul_87" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_15" + op: "Sqrt" + input: "add_54" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_55/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_55" + op: "AddV2" + input: "Sqrt_15" + input: "add_55/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_16" + op: "RealDiv" + input: "add_53" + input: "add_55" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_67" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_88/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_88" + op: "Mul" + input: "mul_88/x" + input: "ReadVariableOp_67" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_56" + op: "AddV2" + input: "truediv_16" + input: "mul_88" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_89" + op: "Mul" + input: "PolynomialDecay" + input: "add_56" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_68" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_15" + op: "Sub" + input: "ReadVariableOp_68" + 
input: "mul_89" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_194" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + input: "sub_15" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_69" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + input: "^AssignVariableOp_194" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_195" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + input: "add_53" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_70" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_195" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_196" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + input: "add_54" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_71" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_196" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_0/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_0/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_90/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_90/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_90" + op: "Mul" + input: "Mul_90/x" + input: "Mul_90/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_91/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_91" + op: "Mul" + input: "Mul_91/x" + input: "clip_by_global_norm/clip_by_global_norm/_16" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_57" + op: "AddV2" + input: "Mul_90" + input: "Mul_91" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_92/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_92/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_92" + op: "Mul" + input: "Mul_92/x" + input: "Mul_92/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_16" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_16" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_93/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_93" + op: "Mul" + input: "Mul_93/x" + input: "Square_16" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_58" + op: "AddV2" + input: "Mul_92" + input: "Mul_93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_16" + op: "Sqrt" + input: "add_58" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_59/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_59" + op: "AddV2" + input: "Sqrt_16" + input: "add_59/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_17" + op: "RealDiv" + input: "add_57" + input: "add_59" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_94" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_72" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_16" + 
op: "Sub" + input: "ReadVariableOp_72" + input: "mul_94" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_197" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + input: "sub_16" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_73" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + input: "^AssignVariableOp_197" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_198" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + input: "add_57" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_74" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_198" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_199" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + input: "add_58" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_75" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_199" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel/adam_v" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_95/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_95/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_95" + op: "Mul" + input: "Mul_95/x" + input: "Mul_95/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_96/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_96" + op: "Mul" + input: "Mul_96/x" + input: "clip_by_global_norm/clip_by_global_norm/_17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_60" + op: "AddV2" + input: "Mul_95" + input: "Mul_96" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_97/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_97/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_97" + op: "Mul" + input: "Mul_97/x" + input: "Mul_97/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_17" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_98/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_98" + op: "Mul" + input: "Mul_98/x" + input: "Square_17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_61" + op: "AddV2" + input: "Mul_97" + input: "Mul_98" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_17" + op: "Sqrt" + input: "add_61" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_62/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_62" + op: "AddV2" + input: "Sqrt_17" + input: "add_62/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_18" + op: "RealDiv" + input: "add_60" + input: "add_62" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_76" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_99/x" + op: "Const" + 
attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_99" + op: "Mul" + input: "mul_99/x" + input: "ReadVariableOp_76" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_63" + op: "AddV2" + input: "truediv_18" + input: "mul_99" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_100" + op: "Mul" + input: "PolynomialDecay" + input: "add_63" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_77" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_17" + op: "Sub" + input: "ReadVariableOp_77" + input: "mul_100" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_200" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel" + input: "sub_17" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_78" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel" + input: "^AssignVariableOp_200" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_201" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m" + input: "add_60" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_79" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m" + input: "^AssignVariableOp_201" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_202" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v" + input: "add_61" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_80" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v" + input: "^AssignVariableOp_202" + attr { + key: "_output_shapes" + value { + list { + shape { + dim 
{ + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m" + input: "bert/encoder/layer_0/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v" + input: "bert/encoder/layer_0/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_101/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_101/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_101" + op: "Mul" + input: "Mul_101/x" + input: "Mul_101/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_102/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_102" + op: "Mul" + input: "Mul_102/x" + input: "clip_by_global_norm/clip_by_global_norm/_18" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_64" + op: "AddV2" + input: "Mul_101" + input: "Mul_102" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_103/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_103/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_103" + op: "Mul" + input: "Mul_103/x" + input: "Mul_103/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_18" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_18" + attr { + key: "T" 
+ value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_104/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_104" + op: "Mul" + input: "Mul_104/x" + input: "Square_18" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_65" + op: "AddV2" + input: "Mul_103" + input: "Mul_104" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_18" + op: "Sqrt" + input: "add_65" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_66/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_66" + op: "AddV2" + input: "Sqrt_18" + input: "add_66/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_19" + op: "RealDiv" + input: "add_64" + input: "add_66" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_105" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_19" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_81" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_18" + op: "Sub" + input: "ReadVariableOp_81" + input: "mul_105" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_203" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias" + input: "sub_18" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_82" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias" + input: "^AssignVariableOp_203" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_204" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m" + input: "add_64" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + 
} + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_83" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m" + input: "^AssignVariableOp_204" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_205" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v" + input: "add_65" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_84" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v" + input: "^AssignVariableOp_205" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_106/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_106/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_106" + op: "Mul" + input: "Mul_106/x" + input: "Mul_106/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_107/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: 
"Mul_107" + op: "Mul" + input: "Mul_107/x" + input: "clip_by_global_norm/clip_by_global_norm/_19" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_67" + op: "AddV2" + input: "Mul_106" + input: "Mul_107" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_108/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_108/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_108" + op: "Mul" + input: "Mul_108/x" + input: "Mul_108/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_19" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_19" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_109/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_109" + op: "Mul" + input: "Mul_109/x" + input: "Square_19" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_68" + op: "AddV2" + input: "Mul_108" + input: "Mul_109" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_19" + op: "Sqrt" + input: "add_68" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_69/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_69" + op: "AddV2" + input: "Sqrt_19" + input: "add_69/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_20" + op: "RealDiv" + input: "add_67" + input: "add_69" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_110" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_20" + attr { + key: "T" + value { + type: DT_FLOAT + } 
+ } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_85" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_19" + op: "Sub" + input: "ReadVariableOp_85" + input: "mul_110" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_206" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + input: "sub_19" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_86" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + input: "^AssignVariableOp_206" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_207" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + input: "add_67" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_87" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + input: "^AssignVariableOp_207" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_208" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + input: "add_68" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_88" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + input: "^AssignVariableOp_208" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { 
+ key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_111/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_111/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_111" + op: "Mul" + input: "Mul_111/x" + input: "Mul_111/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_112/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_112" + op: "Mul" + input: "Mul_112/x" + input: "clip_by_global_norm/clip_by_global_norm/_20" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_70" + op: "AddV2" + input: "Mul_111" + input: "Mul_112" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_113/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_113/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_113" + op: "Mul" + input: "Mul_113/x" + input: "Mul_113/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_20" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_20" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_114/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_114" + op: "Mul" + input: "Mul_114/x" + input: "Square_20" + attr { 
+ key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_71" + op: "AddV2" + input: "Mul_113" + input: "Mul_114" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_20" + op: "Sqrt" + input: "add_71" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_72/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_72" + op: "AddV2" + input: "Sqrt_20" + input: "add_72/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_21" + op: "RealDiv" + input: "add_70" + input: "add_72" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_115" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_89" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_20" + op: "Sub" + input: "ReadVariableOp_89" + input: "mul_115" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_209" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + input: "sub_20" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_90" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + input: "^AssignVariableOp_209" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_210" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + input: "add_70" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_91" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + input: "^AssignVariableOp_210" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"AssignVariableOp_211" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + input: "add_71" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_92" + op: "ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + input: "^AssignVariableOp_211" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" 
+ input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + input: 
"bert/encoder/layer_1/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_116/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_116/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_116" + op: "Mul" + input: "Mul_116/x" + input: "Mul_116/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_117/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_117" + op: "Mul" + input: "Mul_117/x" + input: "clip_by_global_norm/clip_by_global_norm/_21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_73" + op: "AddV2" + input: "Mul_116" + input: "Mul_117" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_118/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_118/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_118" + op: "Mul" + input: "Mul_118/x" + input: "Mul_118/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_21" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim 
{ + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_119/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_119" + op: "Mul" + input: "Mul_119/x" + input: "Square_21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_74" + op: "AddV2" + input: "Mul_118" + input: "Mul_119" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_21" + op: "Sqrt" + input: "add_74" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_75/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_75" + op: "AddV2" + input: "Sqrt_21" + input: "add_75/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_22" + op: "RealDiv" + input: "add_73" + input: "add_75" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_93" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_120/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_120" + op: "Mul" + input: "mul_120/x" + input: "ReadVariableOp_93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_76" + op: "AddV2" + input: "truediv_22" + input: "mul_120" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_121" + op: "Mul" + input: "PolynomialDecay" + input: "add_76" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_94" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_21" + op: "Sub" + input: "ReadVariableOp_94" + input: "mul_121" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_212" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + input: "sub_21" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_95" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + input: "^AssignVariableOp_212" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_213" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + input: "add_73" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_96" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_213" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_214" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + input: "add_74" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_97" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_214" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + 
size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_122/x" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_122/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_122" + op: "Mul" + input: "Mul_122/x" + input: "Mul_122/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_123/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_123" + op: "Mul" + input: "Mul_123/x" + input: "clip_by_global_norm/clip_by_global_norm/_22" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_77" + op: "AddV2" + input: "Mul_122" + input: "Mul_123" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_124/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_124/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_124" + op: "Mul" + input: "Mul_124/x" + input: "Mul_124/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_22" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_22" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_125/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_125" + op: "Mul" + input: "Mul_125/x" + input: "Square_22" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_78" + op: "AddV2" + input: "Mul_124" + input: "Mul_125" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_22" + 
op: "Sqrt" + input: "add_78" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_79/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_79" + op: "AddV2" + input: "Sqrt_22" + input: "add_79/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_23" + op: "RealDiv" + input: "add_77" + input: "add_79" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_126" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_98" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_22" + op: "Sub" + input: "ReadVariableOp_98" + input: "mul_126" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_215" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + input: "sub_22" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_99" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + input: "^AssignVariableOp_215" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_216" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + input: "add_77" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_100" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_216" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_217" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + input: "add_78" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_101" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v" 
+ input: "^AssignVariableOp_217" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 
+ } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_127/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_127/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_127" + op: "Mul" + input: "Mul_127/x" + input: "Mul_127/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_128/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_128" + op: "Mul" + input: "Mul_128/x" + input: "clip_by_global_norm/clip_by_global_norm/_23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_80" + op: "AddV2" + input: "Mul_127" + input: "Mul_128" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_129/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_129/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_129" + op: "Mul" + input: "Mul_129/x" + input: "Mul_129/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_23" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_130/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_130" + op: "Mul" + input: "Mul_130/x" + input: "Square_23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value 
{ + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_81" + op: "AddV2" + input: "Mul_129" + input: "Mul_130" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_23" + op: "Sqrt" + input: "add_81" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_82/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_82" + op: "AddV2" + input: "Sqrt_23" + input: "add_82/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_24" + op: "RealDiv" + input: "add_80" + input: "add_82" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_102" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_131/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_131" + op: "Mul" + input: "mul_131/x" + input: "ReadVariableOp_102" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_83" + op: "AddV2" + input: "truediv_24" + input: "mul_131" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_132" + op: "Mul" + input: "PolynomialDecay" + input: "add_83" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_103" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_23" + op: "Sub" + input: "ReadVariableOp_103" + input: "mul_132" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_218" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + 
input: "sub_23" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_104" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + input: "^AssignVariableOp_218" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_219" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + input: "add_80" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_105" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_219" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_220" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + input: "add_81" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_106" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_220" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_1/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_133/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_133/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_133" + op: 
"Mul" + input: "Mul_133/x" + input: "Mul_133/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_134/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_134" + op: "Mul" + input: "Mul_134/x" + input: "clip_by_global_norm/clip_by_global_norm/_24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_84" + op: "AddV2" + input: "Mul_133" + input: "Mul_134" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_135/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_135/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_135" + op: "Mul" + input: "Mul_135/x" + input: "Mul_135/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_24" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_136/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_136" + op: "Mul" + input: "Mul_136/x" + input: "Square_24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_85" + op: "AddV2" + input: "Mul_135" + input: "Mul_136" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_24" + op: "Sqrt" + input: "add_85" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_86/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_86" + op: "AddV2" + input: "Sqrt_24" + input: "add_86/y" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_25" + op: "RealDiv" + input: "add_84" + input: "add_86" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_137" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_25" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_107" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_24" + op: "Sub" + input: "ReadVariableOp_107" + input: "mul_137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_221" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + input: "sub_24" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_108" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + input: "^AssignVariableOp_221" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_222" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + input: "add_84" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_109" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_222" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_223" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + input: "add_85" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_110" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_223" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + 
} + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_138/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_138/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_1/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_138" + op: "Mul" + input: "Mul_138/x" + input: "Mul_138/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_139/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_139" + op: "Mul" + input: "Mul_139/x" + input: "clip_by_global_norm/clip_by_global_norm/_25" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_87" + op: "AddV2" + input: "Mul_138" + input: "Mul_139" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_140/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_140/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_140" + op: "Mul" + input: "Mul_140/x" + input: "Mul_140/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_25" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_25" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_141/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_141" + op: "Mul" + input: "Mul_141/x" + input: "Square_25" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_88" + op: "AddV2" + input: "Mul_140" + input: "Mul_141" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_25" + op: "Sqrt" + input: "add_88" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_89/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_89" + op: "AddV2" + input: "Sqrt_25" + input: "add_89/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_26" + op: "RealDiv" + input: "add_87" + input: "add_89" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_111" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_142/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_142" + op: "Mul" + input: "mul_142/x" + input: "ReadVariableOp_111" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_90" + op: "AddV2" + input: "truediv_26" + input: "mul_142" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_143" + op: "Mul" + input: "PolynomialDecay" + input: "add_90" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_112" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_25" + op: "Sub" + input: "ReadVariableOp_112" + input: "mul_143" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_224" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + input: "sub_25" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_113" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + input: "^AssignVariableOp_224" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 
768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_225" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + input: "add_87" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_114" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_225" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_226" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + input: "add_88" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_115" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_226" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_1/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_144/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_144/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_144" + op: "Mul" + input: "Mul_144/x" + input: "Mul_144/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_145/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { 
+ tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_145" + op: "Mul" + input: "Mul_145/x" + input: "clip_by_global_norm/clip_by_global_norm/_26" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_91" + op: "AddV2" + input: "Mul_144" + input: "Mul_145" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_146/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_146/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_146" + op: "Mul" + input: "Mul_146/x" + input: "Mul_146/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_26" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_26" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_147/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_147" + op: "Mul" + input: "Mul_147/x" + input: "Square_26" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_92" + op: "AddV2" + input: "Mul_146" + input: "Mul_147" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_26" + op: "Sqrt" + input: "add_92" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_93/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_93" + op: "AddV2" + input: "Sqrt_26" + input: "add_93/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_27" + op: "RealDiv" + input: "add_91" + input: "add_93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_148" + op: 
"Mul" + input: "PolynomialDecay" + input: "truediv_27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_116" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_26" + op: "Sub" + input: "ReadVariableOp_116" + input: "mul_148" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_227" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + input: "sub_26" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_117" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + input: "^AssignVariableOp_227" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_228" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + input: "add_91" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_118" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_228" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_229" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + input: "add_92" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_119" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_229" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + 
} + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_149/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_149/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_149" + op: "Mul" + input: "Mul_149/x" + input: 
"Mul_149/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_150/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_150" + op: "Mul" + input: "Mul_150/x" + input: "clip_by_global_norm/clip_by_global_norm/_27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_94" + op: "AddV2" + input: "Mul_149" + input: "Mul_150" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_151/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_151/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_151" + op: "Mul" + input: "Mul_151/x" + input: "Mul_151/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_27" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_152/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_152" + op: "Mul" + input: "Mul_152/x" + input: "Square_27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_95" + op: "AddV2" + input: "Mul_151" + input: "Mul_152" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_27" + op: "Sqrt" + input: "add_95" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_96/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_96" + op: "AddV2" + input: "Sqrt_27" + input: "add_96/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_28" + op: "RealDiv" + input: "add_94" + input: "add_96" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_120" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_153/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_153" + op: "Mul" + input: "mul_153/x" + input: "ReadVariableOp_120" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_97" + op: "AddV2" + input: "truediv_28" + input: "mul_153" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_154" + op: "Mul" + input: "PolynomialDecay" + input: "add_97" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_121" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_27" + op: "Sub" + input: "ReadVariableOp_121" + input: "mul_154" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_230" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + input: "sub_27" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_122" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + input: "^AssignVariableOp_230" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_231" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + input: "add_94" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + 
} + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_123" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_231" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_232" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + input: "add_95" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_124" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_232" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_1/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_155/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_155/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_155" + op: "Mul" + input: "Mul_155/x" + input: "Mul_155/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_156/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_156" + op: "Mul" + input: "Mul_156/x" + input: 
"clip_by_global_norm/clip_by_global_norm/_28" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_98" + op: "AddV2" + input: "Mul_155" + input: "Mul_156" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_157/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_157/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_157" + op: "Mul" + input: "Mul_157/x" + input: "Mul_157/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_28" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_28" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_158/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_158" + op: "Mul" + input: "Mul_158/x" + input: "Square_28" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_99" + op: "AddV2" + input: "Mul_157" + input: "Mul_158" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_28" + op: "Sqrt" + input: "add_99" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_100/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_100" + op: "AddV2" + input: "Sqrt_28" + input: "add_100/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_29" + op: "RealDiv" + input: "add_98" + input: "add_100" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_159" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { 
+ shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_125" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_28" + op: "Sub" + input: "ReadVariableOp_125" + input: "mul_159" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_233" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + input: "sub_28" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_126" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + input: "^AssignVariableOp_233" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_234" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + input: "add_98" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_127" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_234" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_235" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + input: "add_99" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_128" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_235" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } 
+ attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value 
{ + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_160/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_160/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_160" + op: "Mul" + input: "Mul_160/x" + input: "Mul_160/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_161/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_161" + op: "Mul" + input: "Mul_161/x" + input: "clip_by_global_norm/clip_by_global_norm/_29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_101" + op: "AddV2" + input: "Mul_160" + input: "Mul_161" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_162/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_162/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_162" + op: "Mul" + input: "Mul_162/x" + input: "Mul_162/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_29" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_163/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + 
tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_163" + op: "Mul" + input: "Mul_163/x" + input: "Square_29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_102" + op: "AddV2" + input: "Mul_162" + input: "Mul_163" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_29" + op: "Sqrt" + input: "add_102" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_103/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_103" + op: "AddV2" + input: "Sqrt_29" + input: "add_103/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_30" + op: "RealDiv" + input: "add_101" + input: "add_103" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_164" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_30" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_129" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_29" + op: "Sub" + input: "ReadVariableOp_129" + input: "mul_164" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_236" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + input: "sub_29" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_130" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + input: "^AssignVariableOp_236" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_237" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + input: "add_101" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_131" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + input: "^AssignVariableOp_237" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_238" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + input: "add_102" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_132" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + input: "^AssignVariableOp_238" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_165/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_165/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_165" + op: "Mul" + input: "Mul_165/x" + input: "Mul_165/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_166/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_166" + op: "Mul" + input: "Mul_166/x" + input: "clip_by_global_norm/clip_by_global_norm/_30" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_104" + op: "AddV2" + input: "Mul_165" + input: "Mul_166" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_167/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_167/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_167" + op: "Mul" + input: "Mul_167/x" + input: "Mul_167/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_30" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_30" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_168/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_168" + op: "Mul" + input: "Mul_168/x" + input: "Square_30" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_105" + op: "AddV2" + input: "Mul_167" + input: "Mul_168" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_30" + op: "Sqrt" + input: "add_105" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_106/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_106" + op: "AddV2" + input: "Sqrt_30" + input: "add_106/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_31" + op: "RealDiv" + input: "add_104" + input: "add_106" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + 
name: "mul_169" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_133" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_30" + op: "Sub" + input: "ReadVariableOp_133" + input: "mul_169" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_239" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + input: "sub_30" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_134" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + input: "^AssignVariableOp_239" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_240" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + input: "add_104" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_135" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + input: "^AssignVariableOp_240" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_241" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + input: "add_105" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_136" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + input: "^AssignVariableOp_241" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: 
"bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: 
"bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_170/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_170/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node 
{ + name: "Mul_170" + op: "Mul" + input: "Mul_170/x" + input: "Mul_170/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_171/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_171" + op: "Mul" + input: "Mul_171/x" + input: "clip_by_global_norm/clip_by_global_norm/_31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_107" + op: "AddV2" + input: "Mul_170" + input: "Mul_171" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_172/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_172/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_172" + op: "Mul" + input: "Mul_172/x" + input: "Mul_172/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_31" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_173/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_173" + op: "Mul" + input: "Mul_173/x" + input: "Square_31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_108" + op: "AddV2" + input: "Mul_172" + input: "Mul_173" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_31" + op: "Sqrt" + input: "add_108" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_109/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_109" + op: "AddV2" + input: "Sqrt_31" + input: "add_109/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_32" + op: "RealDiv" + input: "add_107" + input: "add_109" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_137" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_174/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_174" + op: "Mul" + input: "mul_174/x" + input: "ReadVariableOp_137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_110" + op: "AddV2" + input: "truediv_32" + input: "mul_174" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_175" + op: "Mul" + input: "PolynomialDecay" + input: "add_110" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_138" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_31" + op: "Sub" + input: "ReadVariableOp_138" + input: "mul_175" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_242" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + input: "sub_31" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_139" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + input: "^AssignVariableOp_242" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_243" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + input: "add_107" + attr { 
+ key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_140" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_243" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_244" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + input: "add_108" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_141" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_244" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + } + } +} +node { + name: 
"bert/encoder/layer_1/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + attr { + 
key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_176/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_176/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_176" + op: "Mul" + input: "Mul_176/x" + input: "Mul_176/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_177/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_177" + op: "Mul" + input: "Mul_177/x" + input: "clip_by_global_norm/clip_by_global_norm/_32" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_111" + op: "AddV2" + input: "Mul_176" + input: "Mul_177" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_178/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_178/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_178" + op: "Mul" + input: "Mul_178/x" + input: "Mul_178/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_32" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_32" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_179/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_179" + op: "Mul" + input: "Mul_179/x" + input: "Square_32" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_112" + op: "AddV2" + input: "Mul_178" + input: "Mul_179" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_32" + op: "Sqrt" + input: "add_112" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_113/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_113" + op: "AddV2" + input: "Sqrt_32" + input: "add_113/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_33" + op: "RealDiv" + input: "add_111" + input: "add_113" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_180" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_142" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_32" + op: "Sub" + input: "ReadVariableOp_142" + input: "mul_180" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_245" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + input: "sub_32" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_143" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + input: "^AssignVariableOp_245" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_246" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + input: "add_111" + attr { + key: "_has_manual_control_dependencies" + value { + 
b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_144" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_246" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_247" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + input: "add_112" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_145" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_247" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/dense/kernel/adam_m" + } + } +} +node { + name: 
"bert/encoder/layer_1/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_1/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_181/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_181/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_181" + op: "Mul" + input: "Mul_181/x" + input: "Mul_181/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_182/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_182" + op: "Mul" + input: "Mul_182/x" + input: "clip_by_global_norm/clip_by_global_norm/_33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_114" + op: "AddV2" + input: "Mul_181" + input: "Mul_182" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_183/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_183/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_183" + op: "Mul" + input: "Mul_183/x" + input: "Mul_183/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } 
+ dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_33" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_184/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_184" + op: "Mul" + input: "Mul_184/x" + input: "Square_33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_115" + op: "AddV2" + input: "Mul_183" + input: "Mul_184" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_33" + op: "Sqrt" + input: "add_115" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_116/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_116" + op: "AddV2" + input: "Sqrt_33" + input: "add_116/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_34" + op: "RealDiv" + input: "add_114" + input: "add_116" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_146" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_185/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_185" + op: "Mul" + input: "mul_185/x" + input: "ReadVariableOp_146" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_117" + op: "AddV2" + input: "truediv_34" + input: "mul_185" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_186" + op: "Mul" + input: "PolynomialDecay" + input: "add_117" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr 
{ + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_147" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_33" + op: "Sub" + input: "ReadVariableOp_147" + input: "mul_186" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_248" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel" + input: "sub_33" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_148" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel" + input: "^AssignVariableOp_248" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_249" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m" + input: "add_114" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_149" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m" + input: "^AssignVariableOp_249" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_250" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v" + input: "add_115" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_150" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v" + input: "^AssignVariableOp_250" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m" + input: "bert/encoder/layer_1/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v" + input: "bert/encoder/layer_1/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"Mul_187/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_187/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_187" + op: "Mul" + input: "Mul_187/x" + input: "Mul_187/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_188/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_188" + op: "Mul" + input: "Mul_188/x" + input: "clip_by_global_norm/clip_by_global_norm/_34" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_118" + op: "AddV2" + input: "Mul_187" + input: "Mul_188" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_189/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_189/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_189" + op: "Mul" + input: "Mul_189/x" + input: "Mul_189/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_34" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_34" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_190/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_190" + op: "Mul" + input: "Mul_190/x" + input: "Square_34" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_119" + op: "AddV2" + input: "Mul_189" + input: "Mul_190" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} 
+node { + name: "Sqrt_34" + op: "Sqrt" + input: "add_119" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_120/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_120" + op: "AddV2" + input: "Sqrt_34" + input: "add_120/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_35" + op: "RealDiv" + input: "add_118" + input: "add_120" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_191" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_35" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_151" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_34" + op: "Sub" + input: "ReadVariableOp_151" + input: "mul_191" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_251" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias" + input: "sub_34" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_152" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias" + input: "^AssignVariableOp_251" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_252" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m" + input: "add_118" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_153" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m" + input: "^AssignVariableOp_252" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_253" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v" + input: "add_119" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_154" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v" + input: 
"^AssignVariableOp_253" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_192/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_192/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_192" + op: "Mul" + input: "Mul_192/x" + input: "Mul_192/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_193/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_193" + op: "Mul" + input: "Mul_193/x" + input: "clip_by_global_norm/clip_by_global_norm/_35" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_121" + op: "AddV2" + input: "Mul_192" + input: "Mul_193" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_194/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_194/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_194" + op: "Mul" + input: "Mul_194/x" + input: "Mul_194/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_35" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_35" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_195/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_195" + op: "Mul" + input: "Mul_195/x" + input: "Square_35" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_122" + op: "AddV2" + input: "Mul_194" + input: "Mul_195" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_35" + op: "Sqrt" + input: "add_122" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_123/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_123" + op: "AddV2" + input: "Sqrt_35" + input: "add_123/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_36" + op: "RealDiv" + input: "add_121" + input: "add_123" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_196" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_36" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_155" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_35" + op: "Sub" + input: "ReadVariableOp_155" + input: "mul_196" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_254" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + input: "sub_35" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + 
value { + b: true + } + } +} +node { + name: "ReadVariableOp_156" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + input: "^AssignVariableOp_254" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_255" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + input: "add_121" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_157" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + input: "^AssignVariableOp_255" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_256" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + input: "add_122" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_158" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + input: "^AssignVariableOp_256" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m/Initializer/zeros" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_197/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_197/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_197" + op: 
"Mul" + input: "Mul_197/x" + input: "Mul_197/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_198/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_198" + op: "Mul" + input: "Mul_198/x" + input: "clip_by_global_norm/clip_by_global_norm/_36" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_124" + op: "AddV2" + input: "Mul_197" + input: "Mul_198" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_199/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_199/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_199" + op: "Mul" + input: "Mul_199/x" + input: "Mul_199/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_36" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_36" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_200/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_200" + op: "Mul" + input: "Mul_200/x" + input: "Square_36" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_125" + op: "AddV2" + input: "Mul_199" + input: "Mul_200" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_36" + op: "Sqrt" + input: "add_125" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_126/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_126" + op: "AddV2" + input: "Sqrt_36" + 
input: "add_126/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_37" + op: "RealDiv" + input: "add_124" + input: "add_126" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_201" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_37" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_159" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_36" + op: "Sub" + input: "ReadVariableOp_159" + input: "mul_201" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_257" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + input: "sub_36" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_160" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + input: "^AssignVariableOp_257" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_258" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + input: "add_124" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_161" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + input: "^AssignVariableOp_258" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_259" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + input: "add_125" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_162" + op: "ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + input: "^AssignVariableOp_259" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_202/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + 
name: "Mul_202/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_202" + op: "Mul" + input: "Mul_202/x" + input: "Mul_202/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_203/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_203" + op: "Mul" + input: "Mul_203/x" + input: "clip_by_global_norm/clip_by_global_norm/_37" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_127" + op: "AddV2" + input: "Mul_202" + input: "Mul_203" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_204/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_204/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_204" + op: "Mul" + input: "Mul_204/x" + input: "Mul_204/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_37" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_37" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_205/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_205" + op: "Mul" + input: "Mul_205/x" + input: "Square_37" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_128" + op: "AddV2" + input: "Mul_204" + input: "Mul_205" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_37" + op: "Sqrt" + input: "add_128" + attr { + 
key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_129/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_129" + op: "AddV2" + input: "Sqrt_37" + input: "add_129/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_38" + op: "RealDiv" + input: "add_127" + input: "add_129" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_163" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_206/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_206" + op: "Mul" + input: "mul_206/x" + input: "ReadVariableOp_163" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_130" + op: "AddV2" + input: "truediv_38" + input: "mul_206" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_207" + op: "Mul" + input: "PolynomialDecay" + input: "add_130" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_164" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_37" + op: "Sub" + input: "ReadVariableOp_164" + input: "mul_207" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_260" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + input: "sub_37" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_165" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + input: "^AssignVariableOp_260" + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_261" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + input: "add_127" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_166" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_261" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_262" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + input: "add_128" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_167" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_262" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_2/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_208/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_208/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_208" + op: "Mul" + input: "Mul_208/x" + input: "Mul_208/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_209/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { 
+ } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_209" + op: "Mul" + input: "Mul_209/x" + input: "clip_by_global_norm/clip_by_global_norm/_38" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_131" + op: "AddV2" + input: "Mul_208" + input: "Mul_209" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_210/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_210/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_210" + op: "Mul" + input: "Mul_210/x" + input: "Mul_210/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_38" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_38" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_211/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_211" + op: "Mul" + input: "Mul_211/x" + input: "Square_38" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_132" + op: "AddV2" + input: "Mul_210" + input: "Mul_211" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_38" + op: "Sqrt" + input: "add_132" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_133/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_133" + op: "AddV2" + input: "Sqrt_38" + input: "add_133/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_39" + op: "RealDiv" + input: "add_131" + input: "add_133" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_212" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_39" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_168" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_38" + op: "Sub" + input: "ReadVariableOp_168" + input: "mul_212" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_263" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + input: "sub_38" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_169" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + input: "^AssignVariableOp_263" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_264" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + input: "add_131" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_170" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_264" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_265" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + input: "add_132" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_171" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_265" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + 
attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_2/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_213/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_213/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_213" + op: "Mul" + input: "Mul_213/x" + input: "Mul_213/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_214/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_214" + op: "Mul" + input: "Mul_214/x" + input: "clip_by_global_norm/clip_by_global_norm/_39" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_134" + op: "AddV2" + input: "Mul_213" + input: "Mul_214" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_215/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_215/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_215" + op: "Mul" + input: "Mul_215/x" + input: "Mul_215/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_39" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_39" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_216/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_216" + op: "Mul" + input: "Mul_216/x" + input: "Square_39" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_135" + op: "AddV2" + input: "Mul_215" + input: "Mul_216" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_39" + op: "Sqrt" + input: "add_135" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_136/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 
9.999999974752427e-07 + } + } + } +} +node { + name: "add_136" + op: "AddV2" + input: "Sqrt_39" + input: "add_136/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_40" + op: "RealDiv" + input: "add_134" + input: "add_136" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_172" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_217/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_217" + op: "Mul" + input: "mul_217/x" + input: "ReadVariableOp_172" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_137" + op: "AddV2" + input: "truediv_40" + input: "mul_217" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_218" + op: "Mul" + input: "PolynomialDecay" + input: "add_137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_173" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_39" + op: "Sub" + input: "ReadVariableOp_173" + input: "mul_218" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_266" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + input: "sub_39" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_174" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + input: "^AssignVariableOp_266" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_267" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + input: "add_134" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_175" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_267" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_268" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + input: "add_135" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_176" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_268" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/bias/adam_v" + } + } + } 
+ attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_219/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_219/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_219" + op: "Mul" + input: "Mul_219/x" + input: "Mul_219/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_220/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_220" + op: "Mul" + input: "Mul_220/x" + input: "clip_by_global_norm/clip_by_global_norm/_40" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_138" + op: "AddV2" + input: "Mul_219" + input: "Mul_220" + attr { + key: 
"T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_221/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_221/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_221" + op: "Mul" + input: "Mul_221/x" + input: "Mul_221/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_40" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_40" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_222/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_222" + op: "Mul" + input: "Mul_222/x" + input: "Square_40" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_139" + op: "AddV2" + input: "Mul_221" + input: "Mul_222" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_40" + op: "Sqrt" + input: "add_139" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_140/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_140" + op: "AddV2" + input: "Sqrt_40" + input: "add_140/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_41" + op: "RealDiv" + input: "add_138" + input: "add_140" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_223" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_41" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_177" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } +} +node { + name: "sub_40" + op: "Sub" + input: "ReadVariableOp_177" + input: "mul_223" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_269" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + input: "sub_40" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_178" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + input: "^AssignVariableOp_269" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_270" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + input: "add_138" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_179" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_270" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_271" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + input: "add_139" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_180" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_271" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_2/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_2/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_224/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_224/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_224" + op: "Mul" + input: "Mul_224/x" + input: "Mul_224/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_225/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_225" + op: "Mul" + input: "Mul_225/x" + input: 
"clip_by_global_norm/clip_by_global_norm/_41" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_141" + op: "AddV2" + input: "Mul_224" + input: "Mul_225" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_226/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_226/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_226" + op: "Mul" + input: "Mul_226/x" + input: "Mul_226/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_41" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_41" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_227/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_227" + op: "Mul" + input: "Mul_227/x" + input: "Square_41" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_142" + op: "AddV2" + input: "Mul_226" + input: "Mul_227" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_41" + op: "Sqrt" + input: "add_142" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_143/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_143" + op: "AddV2" + input: "Sqrt_41" + input: "add_143/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_42" + op: "RealDiv" + input: "add_141" + input: "add_143" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 
768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_181" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_228/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_228" + op: "Mul" + input: "mul_228/x" + input: "ReadVariableOp_181" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_144" + op: "AddV2" + input: "truediv_42" + input: "mul_228" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_229" + op: "Mul" + input: "PolynomialDecay" + input: "add_144" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_182" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_41" + op: "Sub" + input: "ReadVariableOp_182" + input: "mul_229" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_272" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + input: "sub_41" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_183" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + input: "^AssignVariableOp_272" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_273" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + input: "add_141" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_184" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_273" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_274" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_2/attention/self/value/kernel/adam_v" + input: "add_142" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_185" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_274" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list 
{ + s: "loc:@bert/encoder/layer_2/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_230/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_230/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_230" + op: "Mul" + input: "Mul_230/x" + input: "Mul_230/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_231/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_231" + op: "Mul" + input: "Mul_231/x" + input: "clip_by_global_norm/clip_by_global_norm/_42" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_145" + op: "AddV2" + input: "Mul_230" + input: "Mul_231" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_232/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + 
float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_232/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_232" + op: "Mul" + input: "Mul_232/x" + input: "Mul_232/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_42" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_42" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_233/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_233" + op: "Mul" + input: "Mul_233/x" + input: "Square_42" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_146" + op: "AddV2" + input: "Mul_232" + input: "Mul_233" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_42" + op: "Sqrt" + input: "add_146" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_147/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_147" + op: "AddV2" + input: "Sqrt_42" + input: "add_147/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_43" + op: "RealDiv" + input: "add_145" + input: "add_147" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_234" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_43" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_186" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_42" + op: "Sub" + input: "ReadVariableOp_186" + input: "mul_234" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_275" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + 
input: "sub_42" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_187" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + input: "^AssignVariableOp_275" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_276" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + input: "add_145" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_188" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_276" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_277" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + input: "add_146" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_189" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_277" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } 
+ } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim 
{ + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_235/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_235/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_235" + op: "Mul" + input: "Mul_235/x" + input: "Mul_235/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_236/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_236" + op: "Mul" + input: "Mul_236/x" + input: "clip_by_global_norm/clip_by_global_norm/_43" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_148" + op: "AddV2" + input: "Mul_235" + input: "Mul_236" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_237/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_237/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_237" + op: "Mul" + input: "Mul_237/x" + input: "Mul_237/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_43" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_43" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_238/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_238" + op: "Mul" + input: "Mul_238/x" + input: "Square_43" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_149" + op: "AddV2" + input: "Mul_237" + input: "Mul_238" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_43" + op: "Sqrt" + input: "add_149" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_150/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_150" + op: "AddV2" + input: "Sqrt_43" + input: "add_150/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_44" + op: "RealDiv" + input: "add_148" + input: "add_150" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_190" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } +} +node { + name: "mul_239/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_239" + op: "Mul" + input: "mul_239/x" + input: "ReadVariableOp_190" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_151" + op: "AddV2" + input: "truediv_44" + input: "mul_239" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_240" + op: "Mul" + input: "PolynomialDecay" + input: "add_151" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_191" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_43" + op: "Sub" + input: "ReadVariableOp_191" + input: "mul_240" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_278" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + input: "sub_43" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_192" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + input: "^AssignVariableOp_278" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_279" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + input: "add_148" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_193" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_279" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_280" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + input: "add_149" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_194" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_280" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_241/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_241/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_241" + op: "Mul" + input: "Mul_241/x" + input: "Mul_241/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_242/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_242" + op: "Mul" + input: "Mul_242/x" + input: "clip_by_global_norm/clip_by_global_norm/_44" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_152" + op: "AddV2" + input: "Mul_241" + input: "Mul_242" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_243/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_243/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_243" + op: "Mul" + input: "Mul_243/x" + input: "Mul_243/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_44" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_44" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_244/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_244" + op: "Mul" + input: "Mul_244/x" + input: "Square_44" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_153" + op: "AddV2" + input: "Mul_243" + input: "Mul_244" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_44" + op: "Sqrt" + input: "add_153" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_154/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_154" + op: "AddV2" + input: "Sqrt_44" + input: "add_154/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_45" + op: "RealDiv" + input: "add_152" + input: "add_154" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_245" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_45" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_195" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_44" + op: "Sub" + input: "ReadVariableOp_195" + input: "mul_245" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_281" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + input: "sub_44" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_196" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_2/attention/output/dense/bias" + input: "^AssignVariableOp_281" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_282" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + input: "add_152" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_197" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_282" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_283" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + input: "add_153" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_198" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_283" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m/Initializer/zeros" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_246/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_246/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + attr { + key: "_output_shapes" 
+ value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_246" + op: "Mul" + input: "Mul_246/x" + input: "Mul_246/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_247/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_247" + op: "Mul" + input: "Mul_247/x" + input: "clip_by_global_norm/clip_by_global_norm/_45" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_155" + op: "AddV2" + input: "Mul_246" + input: "Mul_247" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_248/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_248/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_248" + op: "Mul" + input: "Mul_248/x" + input: "Mul_248/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_45" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_45" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_249/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_249" + op: "Mul" + input: "Mul_249/x" + input: "Square_45" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_156" + op: "AddV2" + input: "Mul_248" + input: "Mul_249" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_45" + op: "Sqrt" + input: "add_156" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_157/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_157" + op: "AddV2" + input: "Sqrt_45" + input: "add_157/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_46" + op: "RealDiv" + input: "add_155" + input: "add_157" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_250" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_46" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_199" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_45" + op: "Sub" + input: "ReadVariableOp_199" + input: "mul_250" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_284" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + input: "sub_45" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_200" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + input: "^AssignVariableOp_284" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_285" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + input: "add_155" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_201" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + input: "^AssignVariableOp_285" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_286" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + input: "add_156" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_202" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + input: "^AssignVariableOp_286" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_251/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_251/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_251" + op: "Mul" + input: "Mul_251/x" + input: "Mul_251/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_252/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_252" + op: "Mul" + input: "Mul_252/x" + input: "clip_by_global_norm/clip_by_global_norm/_46" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_158" + op: "AddV2" + input: "Mul_251" + input: "Mul_252" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_253/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_253/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_253" + op: "Mul" + input: "Mul_253/x" + input: "Mul_253/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_46" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_46" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_254/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_254" + op: "Mul" + input: "Mul_254/x" + input: "Square_46" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_159" + op: "AddV2" + input: "Mul_253" + input: "Mul_254" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_46" + op: "Sqrt" + input: "add_159" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_160/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_160" + op: "AddV2" + input: "Sqrt_46" + input: "add_160/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_47" + op: "RealDiv" + input: "add_158" + input: "add_160" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_255" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_47" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_203" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_46" + op: "Sub" + input: "ReadVariableOp_203" + input: "mul_255" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_287" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + input: "sub_46" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_204" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + input: "^AssignVariableOp_287" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_288" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + input: "add_158" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_205" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + input: "^AssignVariableOp_288" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_289" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + input: "add_159" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_206" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + input: "^AssignVariableOp_289" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} 
+node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: 
"bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_256/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_256/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_256" + op: "Mul" + input: "Mul_256/x" + input: "Mul_256/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_257/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_257" + op: "Mul" + input: "Mul_257/x" + input: "clip_by_global_norm/clip_by_global_norm/_47" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_161" + op: "AddV2" + input: "Mul_256" + input: "Mul_257" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} 
+node { + name: "Mul_258/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_258/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_258" + op: "Mul" + input: "Mul_258/x" + input: "Mul_258/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_47" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_47" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_259/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_259" + op: "Mul" + input: "Mul_259/x" + input: "Square_47" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_162" + op: "AddV2" + input: "Mul_258" + input: "Mul_259" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_47" + op: "Sqrt" + input: "add_162" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_163/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_163" + op: "AddV2" + input: "Sqrt_47" + input: "add_163/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_48" + op: "RealDiv" + input: "add_161" + input: "add_163" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_207" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_260/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { 
+ key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_260" + op: "Mul" + input: "mul_260/x" + input: "ReadVariableOp_207" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_164" + op: "AddV2" + input: "truediv_48" + input: "mul_260" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_261" + op: "Mul" + input: "PolynomialDecay" + input: "add_164" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_208" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_47" + op: "Sub" + input: "ReadVariableOp_208" + input: "mul_261" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_290" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + input: "sub_47" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_209" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + input: "^AssignVariableOp_290" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_291" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + input: "add_161" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_210" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_291" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_292" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + input: "add_162" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_211" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_292" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + 
size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_2/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_262/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_262/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_262" + op: "Mul" + input: "Mul_262/x" + input: "Mul_262/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_263/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_263" + op: "Mul" + input: "Mul_263/x" + input: "clip_by_global_norm/clip_by_global_norm/_48" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_165" + op: "AddV2" + input: "Mul_262" + input: "Mul_263" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_264/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_264/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_264" + op: "Mul" + input: "Mul_264/x" + input: "Mul_264/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_48" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_48" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_265/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_265" + op: "Mul" + input: "Mul_265/x" + input: "Square_48" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_166" + op: "AddV2" + input: "Mul_264" + input: "Mul_265" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_48" + op: "Sqrt" + input: "add_166" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_167/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_167" + op: "AddV2" + input: "Sqrt_48" + input: "add_167/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_49" + op: "RealDiv" + input: "add_165" + input: "add_167" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_266" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_49" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_212" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_48" + op: "Sub" + input: "ReadVariableOp_212" + input: "mul_266" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_293" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + input: "sub_48" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_213" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + input: "^AssignVariableOp_293" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_294" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + input: "add_165" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_214" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_294" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_295" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + input: "add_166" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_215" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_295" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + 
} + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_2/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_267/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_267/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_267" + op: "Mul" + input: "Mul_267/x" + input: "Mul_267/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_268/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_268" + op: "Mul" + input: "Mul_268/x" + input: "clip_by_global_norm/clip_by_global_norm/_49" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_168" + op: "AddV2" + input: "Mul_267" + input: "Mul_268" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_269/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_269/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_269" + op: "Mul" + input: "Mul_269/x" + input: "Mul_269/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_49" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_49" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_270/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_270" + op: "Mul" + input: "Mul_270/x" + input: "Square_49" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_169" + op: "AddV2" + input: "Mul_269" + input: "Mul_270" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list 
{ + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_49" + op: "Sqrt" + input: "add_169" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_170/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_170" + op: "AddV2" + input: "Sqrt_49" + input: "add_170/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_50" + op: "RealDiv" + input: "add_168" + input: "add_170" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_216" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_271/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_271" + op: "Mul" + input: "mul_271/x" + input: "ReadVariableOp_216" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_171" + op: "AddV2" + input: "truediv_50" + input: "mul_271" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_272" + op: "Mul" + input: "PolynomialDecay" + input: "add_171" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_217" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_49" + op: "Sub" + input: "ReadVariableOp_217" + input: "mul_272" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_296" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel" + input: "sub_49" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_218" + op: 
"ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel" + input: "^AssignVariableOp_296" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_297" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m" + input: "add_168" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_219" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m" + input: "^AssignVariableOp_297" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_298" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v" + input: "add_169" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_220" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v" + input: "^AssignVariableOp_298" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m" + input: "bert/encoder/layer_2/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_2/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v" + input: "bert/encoder/layer_2/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_273/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_273/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_273" + op: "Mul" + input: "Mul_273/x" + input: "Mul_273/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_274/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { 
+ dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_274" + op: "Mul" + input: "Mul_274/x" + input: "clip_by_global_norm/clip_by_global_norm/_50" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_172" + op: "AddV2" + input: "Mul_273" + input: "Mul_274" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_275/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_275/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_275" + op: "Mul" + input: "Mul_275/x" + input: "Mul_275/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_50" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_50" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_276/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_276" + op: "Mul" + input: "Mul_276/x" + input: "Square_50" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_173" + op: "AddV2" + input: "Mul_275" + input: "Mul_276" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_50" + op: "Sqrt" + input: "add_173" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_174/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_174" + op: "AddV2" + input: "Sqrt_50" + input: "add_174/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_51" + op: "RealDiv" + input: "add_172" + input: "add_174" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_277" + op: "Mul" + 
input: "PolynomialDecay" + input: "truediv_51" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_221" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_50" + op: "Sub" + input: "ReadVariableOp_221" + input: "mul_277" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_299" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias" + input: "sub_50" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_222" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias" + input: "^AssignVariableOp_299" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_300" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m" + input: "add_172" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_223" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m" + input: "^AssignVariableOp_300" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_301" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v" + input: "add_173" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_224" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v" + input: "^AssignVariableOp_301" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_278/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_278/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_278" + op: "Mul" + input: "Mul_278/x" + input: "Mul_278/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_279/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_279" + op: "Mul" + input: "Mul_279/x" + input: "clip_by_global_norm/clip_by_global_norm/_51" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_175" + op: "AddV2" + input: "Mul_278" + input: "Mul_279" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_280/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_280/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_280" + op: "Mul" + input: "Mul_280/x" + input: "Mul_280/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_51" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_51" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_281/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_281" + op: "Mul" + input: "Mul_281/x" + input: "Square_51" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_176" + op: "AddV2" + input: "Mul_280" + input: "Mul_281" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_51" + op: "Sqrt" + input: "add_176" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_177/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_177" + op: "AddV2" + input: "Sqrt_51" + input: "add_177/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_52" + op: "RealDiv" + input: "add_175" + input: "add_177" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_282" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_52" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_225" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_51" + op: "Sub" + input: "ReadVariableOp_225" + input: "mul_282" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_302" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + input: "sub_51" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_226" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + input: "^AssignVariableOp_302" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_303" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + input: "add_175" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_227" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + input: "^AssignVariableOp_303" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + 
} +} +node { + name: "AssignVariableOp_304" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + input: "add_176" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_228" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + input: "^AssignVariableOp_304" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + 
size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_283/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_283/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_283" + op: "Mul" + input: "Mul_283/x" + input: "Mul_283/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_284/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_284" + op: "Mul" + input: "Mul_284/x" + input: "clip_by_global_norm/clip_by_global_norm/_52" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_178" + op: "AddV2" + input: "Mul_283" + input: "Mul_284" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } 
+} +node { + name: "Mul_285/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_285/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_285" + op: "Mul" + input: "Mul_285/x" + input: "Mul_285/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_52" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_52" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_286/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_286" + op: "Mul" + input: "Mul_286/x" + input: "Square_52" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_179" + op: "AddV2" + input: "Mul_285" + input: "Mul_286" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_52" + op: "Sqrt" + input: "add_179" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_180/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_180" + op: "AddV2" + input: "Sqrt_52" + input: "add_180/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_53" + op: "RealDiv" + input: "add_178" + input: "add_180" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_287" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_53" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_229" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_52" + op: "Sub" + input: "ReadVariableOp_229" + input: "mul_287" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_305" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + input: "sub_52" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_230" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + input: "^AssignVariableOp_305" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_306" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + input: "add_178" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_231" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + input: "^AssignVariableOp_306" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_307" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + input: "add_179" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_232" + op: "ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + input: "^AssignVariableOp_307" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_3/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_3/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_288/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_288/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_288" + op: "Mul" + input: "Mul_288/x" + input: "Mul_288/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_289/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_289" + op: "Mul" + input: "Mul_289/x" + input: 
"clip_by_global_norm/clip_by_global_norm/_53" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_181" + op: "AddV2" + input: "Mul_288" + input: "Mul_289" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_290/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_290/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_290" + op: "Mul" + input: "Mul_290/x" + input: "Mul_290/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_53" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_53" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_291/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_291" + op: "Mul" + input: "Mul_291/x" + input: "Square_53" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_182" + op: "AddV2" + input: "Mul_290" + input: "Mul_291" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_53" + op: "Sqrt" + input: "add_182" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_183/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_183" + op: "AddV2" + input: "Sqrt_53" + input: "add_183/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_54" + op: "RealDiv" + input: "add_181" + input: "add_183" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 
768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_233" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_292/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_292" + op: "Mul" + input: "mul_292/x" + input: "ReadVariableOp_233" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_184" + op: "AddV2" + input: "truediv_54" + input: "mul_292" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_293" + op: "Mul" + input: "PolynomialDecay" + input: "add_184" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_234" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_53" + op: "Sub" + input: "ReadVariableOp_234" + input: "mul_293" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_308" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + input: "sub_53" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_235" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + input: "^AssignVariableOp_308" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_309" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + input: "add_181" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_236" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_309" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_310" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_3/attention/self/query/kernel/adam_v" + input: "add_182" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_237" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_310" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list 
{ + s: "loc:@bert/encoder/layer_3/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_294/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_294/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_294" + op: "Mul" + input: "Mul_294/x" + input: "Mul_294/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_295/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_295" + op: "Mul" + input: "Mul_295/x" + input: "clip_by_global_norm/clip_by_global_norm/_54" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_185" + op: "AddV2" + input: "Mul_294" + input: "Mul_295" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_296/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + 
float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_296/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_296" + op: "Mul" + input: "Mul_296/x" + input: "Mul_296/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_54" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_54" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_297/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_297" + op: "Mul" + input: "Mul_297/x" + input: "Square_54" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_186" + op: "AddV2" + input: "Mul_296" + input: "Mul_297" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_54" + op: "Sqrt" + input: "add_186" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_187/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_187" + op: "AddV2" + input: "Sqrt_54" + input: "add_187/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_55" + op: "RealDiv" + input: "add_185" + input: "add_187" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_298" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_55" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_238" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_54" + op: "Sub" + input: "ReadVariableOp_238" + input: "mul_298" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_311" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + 
input: "sub_54" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_239" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + input: "^AssignVariableOp_311" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_312" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + input: "add_185" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_240" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_312" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_313" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + input: "add_186" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_241" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_313" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + 
} +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_299/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_299/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_299" + op: "Mul" + input: "Mul_299/x" + input: "Mul_299/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_300/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_300" + op: "Mul" + input: "Mul_300/x" + input: "clip_by_global_norm/clip_by_global_norm/_55" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_188" + op: "AddV2" + input: "Mul_299" + input: "Mul_300" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + 
} + } + } +} +node { + name: "Mul_301/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_301/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_301" + op: "Mul" + input: "Mul_301/x" + input: "Mul_301/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_55" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_55" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_302/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_302" + op: "Mul" + input: "Mul_302/x" + input: "Square_55" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_189" + op: "AddV2" + input: "Mul_301" + input: "Mul_302" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_55" + op: "Sqrt" + input: "add_189" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_190/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_190" + op: "AddV2" + input: "Sqrt_55" + input: "add_190/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_56" + op: "RealDiv" + input: "add_188" + input: "add_190" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_242" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_303/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + 
attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_303" + op: "Mul" + input: "mul_303/x" + input: "ReadVariableOp_242" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_191" + op: "AddV2" + input: "truediv_56" + input: "mul_303" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_304" + op: "Mul" + input: "PolynomialDecay" + input: "add_191" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_243" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_55" + op: "Sub" + input: "ReadVariableOp_243" + input: "mul_304" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_314" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + input: "sub_55" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_244" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + input: "^AssignVariableOp_314" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_315" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + input: "add_188" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_245" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_315" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_316" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + input: "add_189" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_246" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_316" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + 
size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + } + } +} +node { + name: 
"bert/encoder/layer_3/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_305/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_305/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_305" + op: "Mul" + input: "Mul_305/x" + input: "Mul_305/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_306/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_306" + op: "Mul" + input: "Mul_306/x" + input: "clip_by_global_norm/clip_by_global_norm/_56" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_192" + op: "AddV2" + input: "Mul_305" + input: "Mul_306" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_307/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_307/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_307" + op: "Mul" + input: "Mul_307/x" + input: "Mul_307/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 
+ } + } + } + } + } +} +node { + name: "Square_56" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_56" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_308/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_308" + op: "Mul" + input: "Mul_308/x" + input: "Square_56" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_193" + op: "AddV2" + input: "Mul_307" + input: "Mul_308" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_56" + op: "Sqrt" + input: "add_193" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_194/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_194" + op: "AddV2" + input: "Sqrt_56" + input: "add_194/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_57" + op: "RealDiv" + input: "add_192" + input: "add_194" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_309" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_57" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_247" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_56" + op: "Sub" + input: "ReadVariableOp_247" + input: "mul_309" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_317" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + input: "sub_56" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_248" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + input: "^AssignVariableOp_317" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_318" + 
op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + input: "add_192" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_249" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_318" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_319" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + input: "add_193" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_250" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_319" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + 
dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + 
size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_310/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_310/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_310" + op: "Mul" + input: "Mul_310/x" + input: "Mul_310/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_311/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_311" + op: "Mul" + input: "Mul_311/x" + input: "clip_by_global_norm/clip_by_global_norm/_57" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_195" + op: "AddV2" + input: "Mul_310" + input: "Mul_311" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_312/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_312/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_312" + op: "Mul" + input: "Mul_312/x" + input: "Mul_312/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_57" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_57" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_313/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_313" + op: "Mul" + input: "Mul_313/x" + input: "Square_57" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_196" + op: "AddV2" + input: "Mul_312" + input: "Mul_313" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_57" + op: "Sqrt" + input: "add_196" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_197/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_197" + op: "AddV2" + input: "Sqrt_57" + input: "add_197/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_58" + op: "RealDiv" + input: "add_195" + input: "add_197" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_251" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_314/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_314" + op: "Mul" + input: "mul_314/x" + input: "ReadVariableOp_251" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_198" + op: "AddV2" + 
input: "truediv_58" + input: "mul_314" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_315" + op: "Mul" + input: "PolynomialDecay" + input: "add_198" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_252" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_57" + op: "Sub" + input: "ReadVariableOp_252" + input: "mul_315" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_320" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + input: "sub_57" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_253" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + input: "^AssignVariableOp_320" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_321" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + input: "add_195" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_254" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_321" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_322" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + input: "add_196" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_255" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_322" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + 
key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + input: 
"bert/encoder/layer_3/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_316/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_316/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_316" + op: "Mul" + input: "Mul_316/x" + input: "Mul_316/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_317/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_317" + op: "Mul" + input: "Mul_317/x" + input: "clip_by_global_norm/clip_by_global_norm/_58" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_199" + op: "AddV2" + input: "Mul_316" + input: "Mul_317" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_318/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_318/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_318" + op: "Mul" + input: "Mul_318/x" + input: "Mul_318/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_58" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_58" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_319/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_319" + op: "Mul" + input: "Mul_319/x" + input: "Square_58" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_200" + op: "AddV2" + input: "Mul_318" + input: "Mul_319" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_58" + op: "Sqrt" + input: "add_200" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_201/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_201" + op: "AddV2" + input: "Sqrt_58" + input: "add_201/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_59" + op: "RealDiv" + input: "add_199" + input: "add_201" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_320" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_59" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_256" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_58" + op: "Sub" + input: "ReadVariableOp_256" + input: "mul_320" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_323" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + input: "sub_58" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_257" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + input: "^AssignVariableOp_323" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_324" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + input: "add_199" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_258" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_3/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_324" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_325" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + input: "add_200" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_259" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_325" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_321/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_321/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_321" + op: "Mul" + input: "Mul_321/x" + input: "Mul_321/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_322/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_322" + op: "Mul" + input: "Mul_322/x" + input: "clip_by_global_norm/clip_by_global_norm/_59" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_202" + op: "AddV2" + input: "Mul_321" + input: "Mul_322" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_323/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_323/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_323" + op: "Mul" + input: "Mul_323/x" + input: "Mul_323/ReadVariableOp" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_59" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_59" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_324/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_324" + op: "Mul" + input: "Mul_324/x" + input: "Square_59" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_203" + op: "AddV2" + input: "Mul_323" + input: "Mul_324" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_59" + op: "Sqrt" + input: "add_203" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_204/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_204" + op: "AddV2" + input: "Sqrt_59" + input: "add_204/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_60" + op: "RealDiv" + input: "add_202" + input: "add_204" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_260" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_325/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_325" + op: "Mul" + input: "mul_325/x" + input: "ReadVariableOp_260" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_205" + op: "AddV2" + input: "truediv_60" + input: "mul_325" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_326" + op: "Mul" + 
input: "PolynomialDecay" + input: "add_205" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_261" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_59" + op: "Sub" + input: "ReadVariableOp_261" + input: "mul_326" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_326" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + input: "sub_59" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_262" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + input: "^AssignVariableOp_326" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_327" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + input: "add_202" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_263" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_327" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_328" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + input: "add_203" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_264" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_328" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" 
+ value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + 
value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_327/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_327/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_327" + op: "Mul" + input: "Mul_327/x" + input: "Mul_327/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_328/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_328" + op: "Mul" + input: "Mul_328/x" + input: "clip_by_global_norm/clip_by_global_norm/_60" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_206" + op: "AddV2" + input: "Mul_327" + input: "Mul_328" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_329/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_329/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_329" + op: "Mul" + input: "Mul_329/x" + input: "Mul_329/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_60" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_60" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_330/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_330" + op: "Mul" + input: "Mul_330/x" + 
input: "Square_60" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_207" + op: "AddV2" + input: "Mul_329" + input: "Mul_330" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_60" + op: "Sqrt" + input: "add_207" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_208/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_208" + op: "AddV2" + input: "Sqrt_60" + input: "add_208/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_61" + op: "RealDiv" + input: "add_206" + input: "add_208" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_331" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_61" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_265" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_60" + op: "Sub" + input: "ReadVariableOp_265" + input: "mul_331" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_329" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + input: "sub_60" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_266" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + input: "^AssignVariableOp_329" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_330" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + input: "add_206" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_267" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_330" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + 
name: "AssignVariableOp_331" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + input: "add_207" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_268" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_331" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_332/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_332/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_332" + op: "Mul" + input: "Mul_332/x" + input: "Mul_332/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_333/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_333" + op: "Mul" + input: "Mul_333/x" + input: "clip_by_global_norm/clip_by_global_norm/_61" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } 
+ } +} +node { + name: "add_209" + op: "AddV2" + input: "Mul_332" + input: "Mul_333" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_334/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_334/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_334" + op: "Mul" + input: "Mul_334/x" + input: "Mul_334/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_61" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_61" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_335/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_335" + op: "Mul" + input: "Mul_335/x" + input: "Square_61" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_210" + op: "AddV2" + input: "Mul_334" + input: "Mul_335" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_61" + op: "Sqrt" + input: "add_210" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_211/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_211" + op: "AddV2" + input: "Sqrt_61" + input: "add_211/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_62" + op: "RealDiv" + input: "add_209" + input: "add_211" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_336" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_62" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_269" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_61" + op: "Sub" + input: "ReadVariableOp_269" + input: "mul_336" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_332" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + input: "sub_61" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_270" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + input: "^AssignVariableOp_332" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_333" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + input: "add_209" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_271" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + input: "^AssignVariableOp_333" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_334" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + input: "add_210" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_272" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + input: "^AssignVariableOp_334" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + 
attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + 
} + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_337/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_337/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_337" + op: "Mul" + input: "Mul_337/x" + input: "Mul_337/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_338/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_338" + op: "Mul" + input: "Mul_338/x" + input: "clip_by_global_norm/clip_by_global_norm/_62" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_212" + op: "AddV2" + input: "Mul_337" + input: "Mul_338" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_339/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_339/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_339" + op: "Mul" + input: "Mul_339/x" + input: "Mul_339/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_62" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_62" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_340/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + 
float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_340" + op: "Mul" + input: "Mul_340/x" + input: "Square_62" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_213" + op: "AddV2" + input: "Mul_339" + input: "Mul_340" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_62" + op: "Sqrt" + input: "add_213" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_214/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_214" + op: "AddV2" + input: "Sqrt_62" + input: "add_214/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_63" + op: "RealDiv" + input: "add_212" + input: "add_214" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_341" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_63" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_273" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_62" + op: "Sub" + input: "ReadVariableOp_273" + input: "mul_341" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_335" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + input: "sub_62" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_274" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + input: "^AssignVariableOp_335" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_336" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + input: "add_212" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_275" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + input: 
"^AssignVariableOp_336" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_337" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + input: "add_213" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_276" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + input: "^AssignVariableOp_337" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { 
+ shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + 
name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_342/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_342/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_342" + op: "Mul" + input: "Mul_342/x" + input: "Mul_342/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_343/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_343" + op: "Mul" + input: "Mul_343/x" + input: "clip_by_global_norm/clip_by_global_norm/_63" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_215" + op: "AddV2" + input: "Mul_342" + input: "Mul_343" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_344/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_344/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_344" + op: "Mul" + input: "Mul_344/x" + input: "Mul_344/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_63" + op: "Square" + input: 
"clip_by_global_norm/clip_by_global_norm/_63" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_345/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_345" + op: "Mul" + input: "Mul_345/x" + input: "Square_63" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_216" + op: "AddV2" + input: "Mul_344" + input: "Mul_345" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_63" + op: "Sqrt" + input: "add_216" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_217/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_217" + op: "AddV2" + input: "Sqrt_63" + input: "add_217/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_64" + op: "RealDiv" + input: "add_215" + input: "add_217" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_277" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_346/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_346" + op: "Mul" + input: "mul_346/x" + input: "ReadVariableOp_277" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_218" + op: "AddV2" + input: "truediv_64" + input: "mul_346" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_347" + op: "Mul" + input: "PolynomialDecay" + input: "add_218" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + 
size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_278" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_63" + op: "Sub" + input: "ReadVariableOp_278" + input: "mul_347" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_338" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + input: "sub_63" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_279" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + input: "^AssignVariableOp_338" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_339" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + input: "add_215" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_280" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_339" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_340" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + input: "add_216" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_281" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_340" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: 
"bert/encoder/layer_3/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_348/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_348/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_348" + op: "Mul" + input: "Mul_348/x" + input: "Mul_348/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_349/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_349" + op: "Mul" + input: 
"Mul_349/x" + input: "clip_by_global_norm/clip_by_global_norm/_64" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_219" + op: "AddV2" + input: "Mul_348" + input: "Mul_349" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_350/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_350/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_350" + op: "Mul" + input: "Mul_350/x" + input: "Mul_350/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_64" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_64" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_351/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_351" + op: "Mul" + input: "Mul_351/x" + input: "Square_64" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_220" + op: "AddV2" + input: "Mul_350" + input: "Mul_351" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_64" + op: "Sqrt" + input: "add_220" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_221/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_221" + op: "AddV2" + input: "Sqrt_64" + input: "add_221/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_65" + op: "RealDiv" + input: "add_219" + input: "add_221" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_352" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_65" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_282" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_64" + op: "Sub" + input: "ReadVariableOp_282" + input: "mul_352" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_341" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + input: "sub_64" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_283" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + input: "^AssignVariableOp_341" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_342" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + input: "add_219" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_284" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_342" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_343" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + input: "add_220" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_285" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_343" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" 
+ value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: 
"bert/encoder/layer_3/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_353/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_353/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_353" + op: "Mul" + input: "Mul_353/x" + input: "Mul_353/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_354/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } 
+ } +} +node { + name: "Mul_354" + op: "Mul" + input: "Mul_354/x" + input: "clip_by_global_norm/clip_by_global_norm/_65" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_222" + op: "AddV2" + input: "Mul_353" + input: "Mul_354" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_355/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_355/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_355" + op: "Mul" + input: "Mul_355/x" + input: "Mul_355/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_65" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_65" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_356/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_356" + op: "Mul" + input: "Mul_356/x" + input: "Square_65" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_223" + op: "AddV2" + input: "Mul_355" + input: "Mul_356" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_65" + op: "Sqrt" + input: "add_223" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_224/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_224" + op: "AddV2" + input: "Sqrt_65" + input: "add_224/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_66" + op: "RealDiv" + input: "add_222" + input: "add_224" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_286" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_357/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_357" + op: "Mul" + input: "mul_357/x" + input: "ReadVariableOp_286" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_225" + op: "AddV2" + input: "truediv_66" + input: "mul_357" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_358" + op: "Mul" + input: "PolynomialDecay" + input: "add_225" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_287" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_65" + op: "Sub" + input: "ReadVariableOp_287" + input: "mul_358" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_344" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel" + input: "sub_65" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_288" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel" + input: "^AssignVariableOp_344" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_345" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m" + input: "add_222" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_289" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m" + input: "^AssignVariableOp_345" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_346" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_3/output/dense/kernel/adam_v" + input: "add_223" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_290" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v" + input: "^AssignVariableOp_346" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m" + input: "bert/encoder/layer_3/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
} + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v" + input: "bert/encoder/layer_3/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_359/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_359/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_359" + op: "Mul" + input: "Mul_359/x" + input: "Mul_359/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_360/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_360" + op: "Mul" + input: "Mul_360/x" + input: "clip_by_global_norm/clip_by_global_norm/_66" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_226" + op: "AddV2" + input: "Mul_359" + input: "Mul_360" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_361/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_361/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_361" + op: "Mul" + input: "Mul_361/x" + input: "Mul_361/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_66" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_66" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_362/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_362" + op: "Mul" + input: "Mul_362/x" + input: "Square_66" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_227" + op: "AddV2" + input: "Mul_361" + input: "Mul_362" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_66" + op: "Sqrt" + input: "add_227" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_228/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_228" + op: "AddV2" + input: "Sqrt_66" + input: "add_228/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_67" + op: "RealDiv" + input: "add_226" + input: "add_228" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_363" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_67" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_291" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_66" + op: "Sub" + input: "ReadVariableOp_291" + input: "mul_363" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_347" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias" + input: "sub_66" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: 
"ReadVariableOp_292" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias" + input: "^AssignVariableOp_347" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_348" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m" + input: "add_226" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_293" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m" + input: "^AssignVariableOp_348" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_349" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v" + input: "add_227" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_294" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v" + input: "^AssignVariableOp_349" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + 
attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_364/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_364/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_364" + op: "Mul" + input: "Mul_364/x" + input: "Mul_364/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } 
+ } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_365/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_365" + op: "Mul" + input: "Mul_365/x" + input: "clip_by_global_norm/clip_by_global_norm/_67" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_229" + op: "AddV2" + input: "Mul_364" + input: "Mul_365" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_366/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_366/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_366" + op: "Mul" + input: "Mul_366/x" + input: "Mul_366/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_67" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_67" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_367/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_367" + op: "Mul" + input: "Mul_367/x" + input: "Square_67" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_230" + op: "AddV2" + input: "Mul_366" + input: "Mul_367" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_67" + op: "Sqrt" + input: "add_230" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_231/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_231" + op: "AddV2" + input: "Sqrt_67" + input: "add_231/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { 
+ list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_68" + op: "RealDiv" + input: "add_229" + input: "add_231" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_368" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_68" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_295" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_67" + op: "Sub" + input: "ReadVariableOp_295" + input: "mul_368" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_350" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + input: "sub_67" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_296" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + input: "^AssignVariableOp_350" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_351" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + input: "add_229" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_297" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + input: "^AssignVariableOp_351" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_352" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + input: "add_230" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_298" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + input: "^AssignVariableOp_352" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr 
{ + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_369/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_369/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_369" + op: "Mul" + input: "Mul_369/x" + input: "Mul_369/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_370/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_370" + op: "Mul" + input: "Mul_370/x" + input: "clip_by_global_norm/clip_by_global_norm/_68" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_232" + op: "AddV2" + input: "Mul_369" + input: "Mul_370" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_371/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_371/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_371" + op: "Mul" + input: "Mul_371/x" + input: "Mul_371/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_68" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_68" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_372/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_372" + op: "Mul" + input: "Mul_372/x" + input: "Square_68" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_233" + op: "AddV2" + input: "Mul_371" + input: "Mul_372" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_68" + op: "Sqrt" + input: "add_233" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_234/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_234" + op: "AddV2" + input: "Sqrt_68" + input: "add_234/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_69" + op: "RealDiv" + input: "add_232" + input: "add_234" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_373" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_69" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_299" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_68" + op: "Sub" + input: "ReadVariableOp_299" + input: "mul_373" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_353" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + input: "sub_68" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_300" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + input: "^AssignVariableOp_353" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_354" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + input: "add_232" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_301" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + input: "^AssignVariableOp_354" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_355" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + input: "add_233" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_302" + op: "ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + input: "^AssignVariableOp_355" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + 
value { + s: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + 
s: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_374/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_374/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_374" + op: "Mul" + input: "Mul_374/x" + input: "Mul_374/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_375/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_375" + op: "Mul" + input: "Mul_375/x" + input: "clip_by_global_norm/clip_by_global_norm/_69" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_235" + op: "AddV2" + input: "Mul_374" + input: "Mul_375" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_376/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_376/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" 
+ value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_376" + op: "Mul" + input: "Mul_376/x" + input: "Mul_376/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_69" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_69" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_377/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_377" + op: "Mul" + input: "Mul_377/x" + input: "Square_69" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_236" + op: "AddV2" + input: "Mul_376" + input: "Mul_377" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_69" + op: "Sqrt" + input: "add_236" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_237/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_237" + op: "AddV2" + input: "Sqrt_69" + input: "add_237/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_70" + op: "RealDiv" + input: "add_235" + input: "add_237" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_303" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_378/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_378" + op: "Mul" + input: "mul_378/x" + input: "ReadVariableOp_303" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_238" + op: "AddV2" + input: "truediv_70" + input: "mul_378" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_379" + op: "Mul" + input: "PolynomialDecay" + input: "add_238" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_304" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_69" + op: "Sub" + input: "ReadVariableOp_304" + input: "mul_379" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_356" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + input: "sub_69" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_305" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + input: "^AssignVariableOp_356" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_357" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + input: "add_235" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_306" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_357" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_358" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + input: "add_236" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_307" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_358" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + 
float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: 
true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_380/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_380/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_380" + op: "Mul" + input: "Mul_380/x" + input: "Mul_380/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_381/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_381" + op: "Mul" + input: "Mul_381/x" + input: "clip_by_global_norm/clip_by_global_norm/_70" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_239" + op: "AddV2" + input: "Mul_380" + input: "Mul_381" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_382/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_382/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_382" + op: "Mul" + input: "Mul_382/x" + input: "Mul_382/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_70" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_70" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_383/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 
0.0010000000474974513 + } + } + } +} +node { + name: "Mul_383" + op: "Mul" + input: "Mul_383/x" + input: "Square_70" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_240" + op: "AddV2" + input: "Mul_382" + input: "Mul_383" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_70" + op: "Sqrt" + input: "add_240" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_241/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_241" + op: "AddV2" + input: "Sqrt_70" + input: "add_241/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_71" + op: "RealDiv" + input: "add_239" + input: "add_241" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_384" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_71" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_308" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_70" + op: "Sub" + input: "ReadVariableOp_308" + input: "mul_384" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_359" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + input: "sub_70" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_309" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + input: "^AssignVariableOp_359" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_360" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + input: "add_239" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_310" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_360" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } 
+ } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_361" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + input: "add_240" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_311" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_361" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_4/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_4/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_385/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_385/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_385" + op: "Mul" + input: "Mul_385/x" + input: "Mul_385/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_386/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_386" + op: "Mul" + input: "Mul_386/x" + input: "clip_by_global_norm/clip_by_global_norm/_71" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_242" + op: "AddV2" + input: "Mul_385" + input: "Mul_386" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_387/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_387/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_387" + op: "Mul" + input: "Mul_387/x" + input: "Mul_387/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_71" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_71" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_388/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_388" + op: "Mul" + input: "Mul_388/x" + input: "Square_71" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_243" + op: "AddV2" + input: "Mul_387" + input: "Mul_388" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_71" + op: "Sqrt" + input: "add_243" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_244/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_244" + op: "AddV2" + input: "Sqrt_71" + input: "add_244/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_72" + op: "RealDiv" + input: "add_242" + input: "add_244" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_312" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_389/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_389" + op: "Mul" + input: "mul_389/x" + input: "ReadVariableOp_312" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_245" + op: "AddV2" + input: "truediv_72" + input: "mul_389" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_390" + op: "Mul" + input: "PolynomialDecay" + input: "add_245" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_313" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_4/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_71" + op: "Sub" + input: "ReadVariableOp_313" + input: "mul_390" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_362" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + input: "sub_71" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_314" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + input: "^AssignVariableOp_362" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_363" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + input: "add_242" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_315" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_363" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_364" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + input: "add_243" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_316" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_364" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_391/x" + op: "Const" 
+ attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_391/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_391" + op: "Mul" + input: "Mul_391/x" + input: "Mul_391/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_392/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_392" + op: "Mul" + input: "Mul_392/x" + input: "clip_by_global_norm/clip_by_global_norm/_72" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_246" + op: "AddV2" + input: "Mul_391" + input: "Mul_392" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_393/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_393/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_393" + op: "Mul" + input: "Mul_393/x" + input: "Mul_393/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_72" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_72" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_394/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_394" + op: "Mul" + input: "Mul_394/x" + input: "Square_72" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_247" + op: "AddV2" + input: "Mul_393" + input: "Mul_394" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"Sqrt_72" + op: "Sqrt" + input: "add_247" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_248/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_248" + op: "AddV2" + input: "Sqrt_72" + input: "add_248/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_73" + op: "RealDiv" + input: "add_246" + input: "add_248" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_395" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_73" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_317" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_72" + op: "Sub" + input: "ReadVariableOp_317" + input: "mul_395" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_365" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + input: "sub_72" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_318" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + input: "^AssignVariableOp_365" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_366" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + input: "add_246" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_319" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_366" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_367" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + input: "add_247" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_320" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_4/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_367" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_4/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_4/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_396/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_396/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_396" + op: "Mul" + input: "Mul_396/x" + input: "Mul_396/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_397/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_397" + op: "Mul" + input: "Mul_397/x" + input: "clip_by_global_norm/clip_by_global_norm/_73" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_249" + op: "AddV2" + input: "Mul_396" + input: "Mul_397" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_398/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_398/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_398" + op: "Mul" + input: "Mul_398/x" + input: "Mul_398/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_73" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_73" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_399/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_399" + 
op: "Mul" + input: "Mul_399/x" + input: "Square_73" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_250" + op: "AddV2" + input: "Mul_398" + input: "Mul_399" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_73" + op: "Sqrt" + input: "add_250" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_251/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_251" + op: "AddV2" + input: "Sqrt_73" + input: "add_251/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_74" + op: "RealDiv" + input: "add_249" + input: "add_251" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_321" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_400/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_400" + op: "Mul" + input: "mul_400/x" + input: "ReadVariableOp_321" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_252" + op: "AddV2" + input: "truediv_74" + input: "mul_400" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_401" + op: "Mul" + input: "PolynomialDecay" + input: "add_252" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_322" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_73" + op: "Sub" + input: "ReadVariableOp_322" + input: "mul_401" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_368" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + input: "sub_73" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_323" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + input: "^AssignVariableOp_368" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_369" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + input: "add_249" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_324" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_369" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_370" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + input: "add_250" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_325" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_370" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list 
{ + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_402/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_402/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_402" + op: "Mul" + input: "Mul_402/x" + input: "Mul_402/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_403/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_403" + op: "Mul" + input: "Mul_403/x" + input: "clip_by_global_norm/clip_by_global_norm/_74" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_253" + op: "AddV2" + input: "Mul_402" + input: "Mul_403" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_404/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_404/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_404" + op: "Mul" + input: "Mul_404/x" + input: "Mul_404/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_74" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_74" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_405/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_405" + op: "Mul" + input: "Mul_405/x" + input: "Square_74" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_254" + op: "AddV2" + input: "Mul_404" + input: "Mul_405" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_74" + op: "Sqrt" + input: "add_254" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_255/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_255" + op: "AddV2" + input: "Sqrt_74" + input: "add_255/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_75" + op: "RealDiv" + input: "add_253" + input: "add_255" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_406" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_75" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_326" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_74" + op: "Sub" + input: "ReadVariableOp_326" + input: "mul_406" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_371" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + input: "sub_74" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_327" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + input: "^AssignVariableOp_371" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_372" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + input: "add_253" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_328" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_372" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_373" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + input: "add_254" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_329" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_373" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: 
"_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_407/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + 
} + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_407/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_407" + op: "Mul" + input: "Mul_407/x" + input: "Mul_407/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_408/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_408" + op: "Mul" + input: "Mul_408/x" + input: "clip_by_global_norm/clip_by_global_norm/_75" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_256" + op: "AddV2" + input: "Mul_407" + input: "Mul_408" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_409/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_409/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_409" + op: "Mul" + input: "Mul_409/x" + input: "Mul_409/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_75" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_75" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_410/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_410" + op: "Mul" + input: "Mul_410/x" + input: "Square_75" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_257" + op: "AddV2" + input: "Mul_409" + input: "Mul_410" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_75" + op: "Sqrt" + input: "add_257" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_258/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_258" + op: "AddV2" + input: "Sqrt_75" + input: "add_258/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_76" + op: "RealDiv" + input: "add_256" + input: "add_258" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_330" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_411/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_411" + op: "Mul" + input: "mul_411/x" + input: "ReadVariableOp_330" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_259" + op: "AddV2" + input: "truediv_76" + input: "mul_411" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_412" + op: "Mul" + input: "PolynomialDecay" + input: "add_259" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_331" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_75" + op: "Sub" + input: "ReadVariableOp_331" + input: "mul_412" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_374" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + input: "sub_75" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" 
+ value { + b: true + } + } +} +node { + name: "ReadVariableOp_332" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + input: "^AssignVariableOp_374" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_375" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + input: "add_256" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_333" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_375" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_376" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + input: "add_257" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_334" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_376" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_413/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_413/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_413" + op: "Mul" + input: "Mul_413/x" + input: "Mul_413/ReadVariableOp" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_414/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_414" + op: "Mul" + input: "Mul_414/x" + input: "clip_by_global_norm/clip_by_global_norm/_76" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_260" + op: "AddV2" + input: "Mul_413" + input: "Mul_414" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_415/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_415/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_415" + op: "Mul" + input: "Mul_415/x" + input: "Mul_415/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_76" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_76" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_416/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_416" + op: "Mul" + input: "Mul_416/x" + input: "Square_76" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_261" + op: "AddV2" + input: "Mul_415" + input: "Mul_416" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_76" + op: "Sqrt" + input: "add_261" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_262/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_262" + op: "AddV2" + input: "Sqrt_76" + input: "add_262/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_77" + op: "RealDiv" + input: "add_260" + input: "add_262" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_417" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_77" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_335" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_76" + op: "Sub" + input: "ReadVariableOp_335" + input: "mul_417" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_377" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + input: "sub_76" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_336" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + input: "^AssignVariableOp_377" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_378" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + input: "add_260" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_337" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_378" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_379" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + input: "add_261" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_338" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_379" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr 
{ + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_418/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_418/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_418" + op: "Mul" + input: "Mul_418/x" + input: "Mul_418/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_419/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_419" + op: "Mul" + input: "Mul_419/x" + input: "clip_by_global_norm/clip_by_global_norm/_77" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_263" + op: "AddV2" + input: "Mul_418" + input: "Mul_419" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_420/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_420/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_420" + op: "Mul" + input: "Mul_420/x" + input: "Mul_420/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_77" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_77" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_421/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_421" + op: "Mul" + input: "Mul_421/x" + input: "Square_77" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_264" + op: "AddV2" + input: "Mul_420" + input: "Mul_421" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_77" + op: "Sqrt" + input: "add_264" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_265/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_265" + op: "AddV2" + input: "Sqrt_77" + input: "add_265/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_78" + op: "RealDiv" + input: "add_263" + input: "add_265" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_422" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_78" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_339" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_77" + op: "Sub" + input: "ReadVariableOp_339" + input: "mul_422" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_380" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + input: "sub_77" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_340" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + input: "^AssignVariableOp_380" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + 
} + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_381" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + input: "add_263" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_341" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + input: "^AssignVariableOp_381" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_382" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + input: "add_264" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_342" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + input: "^AssignVariableOp_382" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_423/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_423/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"Mul_423" + op: "Mul" + input: "Mul_423/x" + input: "Mul_423/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_424/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_424" + op: "Mul" + input: "Mul_424/x" + input: "clip_by_global_norm/clip_by_global_norm/_78" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_266" + op: "AddV2" + input: "Mul_423" + input: "Mul_424" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_425/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_425/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_425" + op: "Mul" + input: "Mul_425/x" + input: "Mul_425/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_78" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_78" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_426/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_426" + op: "Mul" + input: "Mul_426/x" + input: "Square_78" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_267" + op: "AddV2" + input: "Mul_425" + input: "Mul_426" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_78" + op: "Sqrt" + input: "add_267" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_268/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_268" + op: "AddV2" 
+ input: "Sqrt_78" + input: "add_268/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_79" + op: "RealDiv" + input: "add_266" + input: "add_268" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_427" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_79" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_343" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_78" + op: "Sub" + input: "ReadVariableOp_343" + input: "mul_427" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_383" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + input: "sub_78" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_344" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + input: "^AssignVariableOp_383" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_384" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + input: "add_266" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_345" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + input: "^AssignVariableOp_384" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_385" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + input: "add_267" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_346" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + input: "^AssignVariableOp_385" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_428/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { 
+ tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_428/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_428" + op: "Mul" + input: "Mul_428/x" + input: "Mul_428/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_429/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_429" + op: "Mul" + input: "Mul_429/x" + input: "clip_by_global_norm/clip_by_global_norm/_79" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_269" + op: "AddV2" + input: "Mul_428" + input: "Mul_429" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_430/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_430/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_430" + op: "Mul" + input: "Mul_430/x" + input: "Mul_430/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_79" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_79" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_431/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_431" + op: "Mul" + input: "Mul_431/x" + input: "Square_79" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_270" + op: "AddV2" + input: "Mul_430" + input: "Mul_431" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + 
dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_79" + op: "Sqrt" + input: "add_270" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_271/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_271" + op: "AddV2" + input: "Sqrt_79" + input: "add_271/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_80" + op: "RealDiv" + input: "add_269" + input: "add_271" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_347" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_432/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_432" + op: "Mul" + input: "mul_432/x" + input: "ReadVariableOp_347" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_272" + op: "AddV2" + input: "truediv_80" + input: "mul_432" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_433" + op: "Mul" + input: "PolynomialDecay" + input: "add_272" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_348" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_79" + op: "Sub" + input: "ReadVariableOp_348" + input: "mul_433" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_386" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + input: "sub_79" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_349" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_4/intermediate/dense/kernel" + input: "^AssignVariableOp_386" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_387" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + input: "add_269" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_350" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_387" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_388" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + input: "add_270" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_351" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_388" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_434/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_434/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_434" + op: "Mul" + input: "Mul_434/x" + input: "Mul_434/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_435/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_435" + op: "Mul" + input: "Mul_435/x" + input: "clip_by_global_norm/clip_by_global_norm/_80" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_273" + op: "AddV2" + input: "Mul_434" + input: "Mul_435" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_436/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_436/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } +} +node { + name: "Mul_436" + op: "Mul" + input: "Mul_436/x" + input: "Mul_436/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_80" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_80" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_437/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_437" + op: "Mul" + input: "Mul_437/x" + input: "Square_80" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_274" + op: "AddV2" + input: "Mul_436" + input: "Mul_437" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_80" + op: "Sqrt" + input: "add_274" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_275/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_275" + op: "AddV2" + input: "Sqrt_80" + input: "add_275/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_81" + op: "RealDiv" + input: "add_273" + input: "add_275" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_438" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_81" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_352" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_80" + op: "Sub" + input: "ReadVariableOp_352" + input: "mul_438" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_389" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + input: "sub_80" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_353" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_4/intermediate/dense/bias" + input: "^AssignVariableOp_389" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_390" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + input: "add_273" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_354" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_390" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_391" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + input: "add_274" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_355" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_391" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_439/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_439/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_439" + op: "Mul" + input: "Mul_439/x" + input: "Mul_439/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_440/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_440" + op: "Mul" + input: "Mul_440/x" + input: "clip_by_global_norm/clip_by_global_norm/_81" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_276" + op: "AddV2" + input: "Mul_439" + input: "Mul_440" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_441/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_441/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_441" + op: "Mul" + input: "Mul_441/x" + input: "Mul_441/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_81" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_81" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_442/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_442" + op: "Mul" + input: "Mul_442/x" + input: "Square_81" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_277" + op: "AddV2" + input: "Mul_441" + input: "Mul_442" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_81" + op: "Sqrt" + input: "add_277" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_278/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_278" + op: "AddV2" + input: "Sqrt_81" + input: "add_278/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_82" + op: "RealDiv" + input: "add_276" + input: "add_278" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_356" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_443/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_443" + op: "Mul" + input: "mul_443/x" + input: "ReadVariableOp_356" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_279" + op: "AddV2" + 
input: "truediv_82" + input: "mul_443" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_444" + op: "Mul" + input: "PolynomialDecay" + input: "add_279" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_357" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_81" + op: "Sub" + input: "ReadVariableOp_357" + input: "mul_444" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_392" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel" + input: "sub_81" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_358" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel" + input: "^AssignVariableOp_392" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_393" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m" + input: "add_276" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_359" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m" + input: "^AssignVariableOp_393" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_394" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v" + input: "add_277" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_360" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v" + input: "^AssignVariableOp_394" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape 
{ + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m" + input: "bert/encoder/layer_4/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v" + input: "bert/encoder/layer_4/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + 
} + } +} +node { + name: "bert/encoder/layer_4/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_445/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_445/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_445" + op: "Mul" + input: "Mul_445/x" + input: "Mul_445/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_446/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_446" + op: "Mul" + input: "Mul_446/x" + input: "clip_by_global_norm/clip_by_global_norm/_82" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_280" + op: "AddV2" + input: "Mul_445" + input: "Mul_446" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_447/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_447/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_447" + op: "Mul" + input: "Mul_447/x" + input: "Mul_447/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_82" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_82" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_448/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_448" + op: "Mul" + input: "Mul_448/x" + input: "Square_82" + attr { + key: "T" + value { + type: DT_FLOAT 
+ } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_281" + op: "AddV2" + input: "Mul_447" + input: "Mul_448" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_82" + op: "Sqrt" + input: "add_281" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_282/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_282" + op: "AddV2" + input: "Sqrt_82" + input: "add_282/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_83" + op: "RealDiv" + input: "add_280" + input: "add_282" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_449" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_83" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_361" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_82" + op: "Sub" + input: "ReadVariableOp_361" + input: "mul_449" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_395" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias" + input: "sub_82" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_362" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias" + input: "^AssignVariableOp_395" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_396" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m" + input: "add_280" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_363" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m" + input: "^AssignVariableOp_396" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_397" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v" + 
input: "add_281" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_364" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v" + input: "^AssignVariableOp_397" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + op: 
"VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_450/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_450/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_450" + op: "Mul" + input: "Mul_450/x" + input: "Mul_450/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_451/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_451" + op: "Mul" + input: "Mul_451/x" + input: "clip_by_global_norm/clip_by_global_norm/_83" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_283" + op: "AddV2" + input: "Mul_450" + input: "Mul_451" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_452/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + 
} + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_452/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_452" + op: "Mul" + input: "Mul_452/x" + input: "Mul_452/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_83" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_83" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_453/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_453" + op: "Mul" + input: "Mul_453/x" + input: "Square_83" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_284" + op: "AddV2" + input: "Mul_452" + input: "Mul_453" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_83" + op: "Sqrt" + input: "add_284" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_285/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_285" + op: "AddV2" + input: "Sqrt_83" + input: "add_285/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_84" + op: "RealDiv" + input: "add_283" + input: "add_285" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_454" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_84" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_365" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_83" + op: "Sub" + input: "ReadVariableOp_365" + input: "mul_454" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_398" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + input: "sub_83" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_366" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + input: "^AssignVariableOp_398" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_399" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + input: "add_283" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_367" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + input: "^AssignVariableOp_399" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_400" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + input: "add_284" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_368" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + input: "^AssignVariableOp_400" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" 
+ attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_455/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + 
tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_455/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_455" + op: "Mul" + input: "Mul_455/x" + input: "Mul_455/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_456/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_456" + op: "Mul" + input: "Mul_456/x" + input: "clip_by_global_norm/clip_by_global_norm/_84" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_286" + op: "AddV2" + input: "Mul_455" + input: "Mul_456" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_457/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_457/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_457" + op: "Mul" + input: "Mul_457/x" + input: "Mul_457/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_84" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_84" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_458/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_458" + op: "Mul" + input: "Mul_458/x" + input: "Square_84" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_287" + op: "AddV2" + input: "Mul_457" + input: "Mul_458" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_84" + op: "Sqrt" + input: "add_287" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + 
} + } + } + } + } +} +node { + name: "add_288/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_288" + op: "AddV2" + input: "Sqrt_84" + input: "add_288/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_85" + op: "RealDiv" + input: "add_286" + input: "add_288" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_459" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_85" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_369" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_84" + op: "Sub" + input: "ReadVariableOp_369" + input: "mul_459" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_401" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + input: "sub_84" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_370" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + input: "^AssignVariableOp_401" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_402" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + input: "add_286" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_371" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + input: "^AssignVariableOp_402" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_403" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + input: "add_287" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_372" + op: "ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + input: "^AssignVariableOp_403" + attr { + key: "_output_shapes" + value { + list { + shape { 
+ dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + 
size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 
+ } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_460/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_460/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_460" + op: "Mul" + input: "Mul_460/x" + input: "Mul_460/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_461/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_461" + op: "Mul" + input: "Mul_461/x" + input: "clip_by_global_norm/clip_by_global_norm/_85" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_289" + op: "AddV2" + input: "Mul_460" + input: "Mul_461" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_462/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_462/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_462" + op: "Mul" + input: "Mul_462/x" + input: "Mul_462/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_85" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_85" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_463/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_463" + op: "Mul" + input: "Mul_463/x" + input: "Square_85" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape 
{ + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_290" + op: "AddV2" + input: "Mul_462" + input: "Mul_463" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_85" + op: "Sqrt" + input: "add_290" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_291/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_291" + op: "AddV2" + input: "Sqrt_85" + input: "add_291/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_86" + op: "RealDiv" + input: "add_289" + input: "add_291" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_373" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_464/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_464" + op: "Mul" + input: "mul_464/x" + input: "ReadVariableOp_373" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_292" + op: "AddV2" + input: "truediv_86" + input: "mul_464" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_465" + op: "Mul" + input: "PolynomialDecay" + input: "add_292" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_374" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_85" + op: "Sub" + input: "ReadVariableOp_374" + input: "mul_465" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_404" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + input: 
"sub_85" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_375" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + input: "^AssignVariableOp_404" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_405" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + input: "add_289" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_376" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_405" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_406" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + input: "add_290" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_377" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_406" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_5/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_466/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_466/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} 
+node { + name: "Mul_466" + op: "Mul" + input: "Mul_466/x" + input: "Mul_466/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_467/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_467" + op: "Mul" + input: "Mul_467/x" + input: "clip_by_global_norm/clip_by_global_norm/_86" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_293" + op: "AddV2" + input: "Mul_466" + input: "Mul_467" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_468/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_468/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_468" + op: "Mul" + input: "Mul_468/x" + input: "Mul_468/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_86" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_86" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_469/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_469" + op: "Mul" + input: "Mul_469/x" + input: "Square_86" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_294" + op: "AddV2" + input: "Mul_468" + input: "Mul_469" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_86" + op: "Sqrt" + input: "add_294" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_295/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_295" + op: "AddV2" + 
input: "Sqrt_86" + input: "add_295/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_87" + op: "RealDiv" + input: "add_293" + input: "add_295" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_470" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_87" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_378" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_86" + op: "Sub" + input: "ReadVariableOp_378" + input: "mul_470" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_407" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + input: "sub_86" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_379" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + input: "^AssignVariableOp_407" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_408" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + input: "add_293" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_380" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_408" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_409" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + input: "add_294" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_381" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_409" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } 
+ } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_471/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_471/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_5/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_471" + op: "Mul" + input: "Mul_471/x" + input: "Mul_471/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_472/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_472" + op: "Mul" + input: "Mul_472/x" + input: "clip_by_global_norm/clip_by_global_norm/_87" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_296" + op: "AddV2" + input: "Mul_471" + input: "Mul_472" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_473/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_473/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_473" + op: "Mul" + input: "Mul_473/x" + input: "Mul_473/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_87" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_87" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_474/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_474" + op: "Mul" + input: "Mul_474/x" + input: "Square_87" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_297" + op: "AddV2" + input: "Mul_473" + input: "Mul_474" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_87" + op: "Sqrt" + input: "add_297" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_298/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_298" + op: "AddV2" + input: "Sqrt_87" + input: "add_298/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_88" + op: "RealDiv" + input: "add_296" + input: "add_298" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_382" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_475/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_475" + op: "Mul" + input: "mul_475/x" + input: "ReadVariableOp_382" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_299" + op: "AddV2" + input: "truediv_88" + input: "mul_475" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_476" + op: "Mul" + input: "PolynomialDecay" + input: "add_299" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_383" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_87" + op: "Sub" + input: "ReadVariableOp_383" + input: "mul_476" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_410" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + input: "sub_87" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_384" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + input: "^AssignVariableOp_410" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 
768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_411" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + input: "add_296" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_385" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_411" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_412" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + input: "add_297" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_386" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_412" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_5/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_477/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_477/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_477" + op: "Mul" + input: "Mul_477/x" + input: "Mul_477/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_478/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: 
DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_478" + op: "Mul" + input: "Mul_478/x" + input: "clip_by_global_norm/clip_by_global_norm/_88" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_300" + op: "AddV2" + input: "Mul_477" + input: "Mul_478" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_479/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_479/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_479" + op: "Mul" + input: "Mul_479/x" + input: "Mul_479/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_88" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_88" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_480/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_480" + op: "Mul" + input: "Mul_480/x" + input: "Square_88" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_301" + op: "AddV2" + input: "Mul_479" + input: "Mul_480" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_88" + op: "Sqrt" + input: "add_301" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_302/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_302" + op: "AddV2" + input: "Sqrt_88" + input: "add_302/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_89" + op: "RealDiv" + input: "add_300" + input: "add_302" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_481" + op: "Mul" + input: 
"PolynomialDecay" + input: "truediv_89" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_387" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_88" + op: "Sub" + input: "ReadVariableOp_387" + input: "mul_481" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_413" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + input: "sub_88" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_388" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + input: "^AssignVariableOp_413" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_414" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + input: "add_300" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_389" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_414" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_415" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + input: "add_301" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_390" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_415" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_482/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_482/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_482" + op: "Mul" + input: "Mul_482/x" + input: "Mul_482/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_483/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_483" + op: "Mul" + input: "Mul_483/x" + input: "clip_by_global_norm/clip_by_global_norm/_89" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_303" + op: "AddV2" + input: "Mul_482" + input: "Mul_483" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_484/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_484/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_484" + op: "Mul" + input: "Mul_484/x" + input: "Mul_484/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_89" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_89" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_485/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_485" + op: "Mul" + input: "Mul_485/x" + input: "Square_89" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_304" + op: "AddV2" + input: "Mul_484" + input: "Mul_485" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_89" + op: "Sqrt" + input: "add_304" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_305/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_305" + op: 
"AddV2" + input: "Sqrt_89" + input: "add_305/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_90" + op: "RealDiv" + input: "add_303" + input: "add_305" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_391" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_486/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_486" + op: "Mul" + input: "mul_486/x" + input: "ReadVariableOp_391" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_306" + op: "AddV2" + input: "truediv_90" + input: "mul_486" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_487" + op: "Mul" + input: "PolynomialDecay" + input: "add_306" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_392" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_89" + op: "Sub" + input: "ReadVariableOp_392" + input: "mul_487" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_416" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + input: "sub_89" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_393" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + input: "^AssignVariableOp_416" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_417" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + input: "add_303" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + 
name: "ReadVariableOp_394" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_417" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_418" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + input: "add_304" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_395" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_418" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/bias/adam_v" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_488/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_488/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_488" + op: "Mul" + input: "Mul_488/x" + input: "Mul_488/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_489/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_489" + op: "Mul" + input: "Mul_489/x" + input: "clip_by_global_norm/clip_by_global_norm/_90" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_307" + op: "AddV2" + input: "Mul_488" + input: "Mul_489" + attr { + 
key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_490/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_490/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_490" + op: "Mul" + input: "Mul_490/x" + input: "Mul_490/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_90" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_90" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_491/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_491" + op: "Mul" + input: "Mul_491/x" + input: "Square_90" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_308" + op: "AddV2" + input: "Mul_490" + input: "Mul_491" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_90" + op: "Sqrt" + input: "add_308" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_309/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_309" + op: "AddV2" + input: "Sqrt_90" + input: "add_309/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_91" + op: "RealDiv" + input: "add_307" + input: "add_309" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_492" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_91" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_396" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_90" + op: "Sub" + input: "ReadVariableOp_396" + input: "mul_492" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_419" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + input: "sub_90" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_397" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + input: "^AssignVariableOp_419" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_420" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + input: "add_307" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_398" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_420" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_421" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + input: "add_308" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_399" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_421" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: 
"bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: 
"bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_493/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_493/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_493" + op: "Mul" + input: "Mul_493/x" + input: "Mul_493/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_494/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_494" + op: "Mul" + input: "Mul_494/x" + input: "clip_by_global_norm/clip_by_global_norm/_91" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_310" + op: "AddV2" + input: "Mul_493" + input: "Mul_494" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_495/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_495/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_495" + op: "Mul" + input: "Mul_495/x" + input: "Mul_495/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_91" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_91" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_496/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_496" + op: "Mul" + input: "Mul_496/x" + input: "Square_91" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_311" + op: "AddV2" + input: "Mul_495" + input: "Mul_496" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_91" + op: "Sqrt" + input: "add_311" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_312/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_312" + op: "AddV2" + input: "Sqrt_91" + input: "add_312/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + 
} + } +} +node { + name: "truediv_92" + op: "RealDiv" + input: "add_310" + input: "add_312" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_400" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_497/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_497" + op: "Mul" + input: "mul_497/x" + input: "ReadVariableOp_400" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_313" + op: "AddV2" + input: "truediv_92" + input: "mul_497" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_498" + op: "Mul" + input: "PolynomialDecay" + input: "add_313" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_401" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_91" + op: "Sub" + input: "ReadVariableOp_401" + input: "mul_498" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_422" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + input: "sub_91" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_402" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + input: "^AssignVariableOp_422" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_423" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + input: "add_310" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_403" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_423" + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_424" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + input: "add_311" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_404" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_424" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { 
+ dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_499/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_499/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_499" + op: "Mul" + input: "Mul_499/x" + input: "Mul_499/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_500/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_500" + op: "Mul" + input: "Mul_500/x" + input: "clip_by_global_norm/clip_by_global_norm/_92" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_314" + op: "AddV2" + input: "Mul_499" + input: "Mul_500" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"Mul_501/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_501/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_501" + op: "Mul" + input: "Mul_501/x" + input: "Mul_501/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_92" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_92" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_502/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_502" + op: "Mul" + input: "Mul_502/x" + input: "Square_92" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_315" + op: "AddV2" + input: "Mul_501" + input: "Mul_502" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_92" + op: "Sqrt" + input: "add_315" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_316/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_316" + op: "AddV2" + input: "Sqrt_92" + input: "add_316/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_93" + op: "RealDiv" + input: "add_314" + input: "add_316" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_503" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_405" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_92" + op: "Sub" + input: "ReadVariableOp_405" + input: "mul_503" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_425" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + input: "sub_92" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_406" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + input: "^AssignVariableOp_425" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_426" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + input: "add_314" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_407" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_426" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_427" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + input: "add_315" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_408" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_427" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m/IsInitialized/VarIsInitializedOp" 
+ op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_504/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_504/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_504" + op: "Mul" + input: "Mul_504/x" + input: "Mul_504/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_505/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_505" + op: "Mul" + input: "Mul_505/x" + input: "clip_by_global_norm/clip_by_global_norm/_93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_317" + op: "AddV2" + input: "Mul_504" + input: "Mul_505" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_506/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_506/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_506" + op: "Mul" + input: "Mul_506/x" + input: "Mul_506/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_93" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_507/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_507" + op: "Mul" + input: "Mul_507/x" + input: "Square_93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_318" + op: "AddV2" + input: "Mul_506" + input: "Mul_507" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_93" + op: "Sqrt" + input: "add_318" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_319/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_319" + op: "AddV2" + input: "Sqrt_93" + input: "add_319/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_94" + op: "RealDiv" + input: "add_317" + input: "add_319" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_508" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_94" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_409" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_93" + op: "Sub" + input: "ReadVariableOp_409" + input: "mul_508" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_428" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + input: "sub_93" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_410" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + input: "^AssignVariableOp_428" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_429" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + input: "add_317" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_411" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + input: "^AssignVariableOp_429" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_430" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + input: "add_318" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_412" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + input: "^AssignVariableOp_430" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } 
+ float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_509/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_509/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_509" + op: "Mul" + input: "Mul_509/x" + input: "Mul_509/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_510/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_510" + op: "Mul" + input: "Mul_510/x" + input: "clip_by_global_norm/clip_by_global_norm/_94" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_320" + op: "AddV2" + input: "Mul_509" + input: "Mul_510" + attr { + key: "T" + value { + type: DT_FLOAT + } + } 
+ attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_511/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_511/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_511" + op: "Mul" + input: "Mul_511/x" + input: "Mul_511/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_94" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_94" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_512/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_512" + op: "Mul" + input: "Mul_512/x" + input: "Square_94" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_321" + op: "AddV2" + input: "Mul_511" + input: "Mul_512" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_94" + op: "Sqrt" + input: "add_321" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_322/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_322" + op: "AddV2" + input: "Sqrt_94" + input: "add_322/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_95" + op: "RealDiv" + input: "add_320" + input: "add_322" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_513" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_95" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_413" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } +} +node { + name: "sub_94" + op: "Sub" + input: "ReadVariableOp_413" + input: "mul_513" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_431" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + input: "sub_94" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_414" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + input: "^AssignVariableOp_431" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_432" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + input: "add_320" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_415" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + input: "^AssignVariableOp_432" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_433" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + input: "add_321" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_416" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + input: "^AssignVariableOp_433" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: 
"Fill" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: 
"bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_514/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_514/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_514" + op: "Mul" + input: "Mul_514/x" + input: "Mul_514/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_515/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } 
+} +node { + name: "Mul_515" + op: "Mul" + input: "Mul_515/x" + input: "clip_by_global_norm/clip_by_global_norm/_95" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_323" + op: "AddV2" + input: "Mul_514" + input: "Mul_515" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_516/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_516/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_516" + op: "Mul" + input: "Mul_516/x" + input: "Mul_516/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_95" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_95" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_517/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_517" + op: "Mul" + input: "Mul_517/x" + input: "Square_95" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_324" + op: "AddV2" + input: "Mul_516" + input: "Mul_517" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_95" + op: "Sqrt" + input: "add_324" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_325/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_325" + op: "AddV2" + input: "Sqrt_95" + input: "add_325/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_96" + op: "RealDiv" + input: "add_323" + input: "add_325" + attr { + key: "T" + value { + type: DT_FLOAT + } + } 
+ attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_417" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_518/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_518" + op: "Mul" + input: "mul_518/x" + input: "ReadVariableOp_417" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_326" + op: "AddV2" + input: "truediv_96" + input: "mul_518" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_519" + op: "Mul" + input: "PolynomialDecay" + input: "add_326" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_418" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_95" + op: "Sub" + input: "ReadVariableOp_418" + input: "mul_519" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_434" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + input: "sub_95" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_419" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + input: "^AssignVariableOp_434" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_435" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + input: "add_323" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_420" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_435" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_436" 
+ op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + input: "add_324" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_421" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_436" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } 
+ } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_5/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_520/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_520/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_520" + op: "Mul" + input: "Mul_520/x" + input: "Mul_520/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_521/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_521" + op: "Mul" + input: "Mul_521/x" + input: "clip_by_global_norm/clip_by_global_norm/_96" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_327" + op: "AddV2" + input: "Mul_520" + input: "Mul_521" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_522/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_522/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_522" + op: "Mul" + input: "Mul_522/x" + input: "Mul_522/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_96" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_96" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_523/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_523" + op: "Mul" + input: "Mul_523/x" + input: "Square_96" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_328" + op: "AddV2" + input: "Mul_522" + input: "Mul_523" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_96" + op: "Sqrt" + input: "add_328" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_329/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_329" + op: "AddV2" + input: "Sqrt_96" + input: "add_329/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_97" + op: "RealDiv" + input: "add_327" + input: "add_329" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_524" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_97" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_422" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_96" + op: "Sub" + input: "ReadVariableOp_422" + input: "mul_524" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_437" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + input: "sub_96" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_423" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + input: "^AssignVariableOp_437" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_438" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + input: "add_327" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_424" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_438" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_439" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_5/intermediate/dense/bias/adam_v" + input: "add_328" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_425" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_439" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_5/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_525/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_525/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_525" + op: "Mul" + input: "Mul_525/x" + input: "Mul_525/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_526/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_526" + op: "Mul" + input: "Mul_526/x" + input: "clip_by_global_norm/clip_by_global_norm/_97" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_330" + op: "AddV2" + input: "Mul_525" + input: "Mul_526" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_527/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_527/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_527" + op: "Mul" + input: "Mul_527/x" + input: "Mul_527/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_97" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_97" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_528/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape 
{ + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_528" + op: "Mul" + input: "Mul_528/x" + input: "Square_97" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_331" + op: "AddV2" + input: "Mul_527" + input: "Mul_528" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_97" + op: "Sqrt" + input: "add_331" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_332/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_332" + op: "AddV2" + input: "Sqrt_97" + input: "add_332/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_98" + op: "RealDiv" + input: "add_330" + input: "add_332" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_426" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_529/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_529" + op: "Mul" + input: "mul_529/x" + input: "ReadVariableOp_426" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_333" + op: "AddV2" + input: "truediv_98" + input: "mul_529" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_530" + op: "Mul" + input: "PolynomialDecay" + input: "add_333" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_427" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_97" + op: "Sub" + input: "ReadVariableOp_427" + input: "mul_530" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_440" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel" + input: "sub_97" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_428" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel" + input: "^AssignVariableOp_440" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_441" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m" + input: "add_330" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_429" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m" + input: "^AssignVariableOp_441" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_442" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v" + input: "add_331" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_430" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v" + input: "^AssignVariableOp_442" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } 
+ } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m" + input: "bert/encoder/layer_5/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v" + input: "bert/encoder/layer_5/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_531/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_531/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"Mul_531" + op: "Mul" + input: "Mul_531/x" + input: "Mul_531/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_532/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_532" + op: "Mul" + input: "Mul_532/x" + input: "clip_by_global_norm/clip_by_global_norm/_98" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_334" + op: "AddV2" + input: "Mul_531" + input: "Mul_532" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_533/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_533/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_533" + op: "Mul" + input: "Mul_533/x" + input: "Mul_533/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_98" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_98" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_534/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_534" + op: "Mul" + input: "Mul_534/x" + input: "Square_98" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_335" + op: "AddV2" + input: "Mul_533" + input: "Mul_534" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_98" + op: "Sqrt" + input: "add_335" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_336/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_336" + op: "AddV2" + input: "Sqrt_98" + 
input: "add_336/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_99" + op: "RealDiv" + input: "add_334" + input: "add_336" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_535" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_99" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_431" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_98" + op: "Sub" + input: "ReadVariableOp_431" + input: "mul_535" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_443" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias" + input: "sub_98" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_432" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias" + input: "^AssignVariableOp_443" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_444" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m" + input: "add_334" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_433" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m" + input: "^AssignVariableOp_444" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_445" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v" + input: "add_335" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_434" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v" + input: "^AssignVariableOp_445" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + 
attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_536/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_536/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_536" + op: "Mul" + input: "Mul_536/x" + input: "Mul_536/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_537/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_537" + op: "Mul" + input: "Mul_537/x" + input: "clip_by_global_norm/clip_by_global_norm/_99" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_337" + op: "AddV2" + input: "Mul_536" + input: "Mul_537" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_538/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_538/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_538" + op: "Mul" + input: "Mul_538/x" + input: "Mul_538/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_99" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_99" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_539/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_539" + op: "Mul" + input: "Mul_539/x" + input: "Square_99" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_338" + op: "AddV2" + input: "Mul_538" + input: "Mul_539" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_99" + op: "Sqrt" + input: "add_338" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_339/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_339" + op: "AddV2" + input: "Sqrt_99" + input: "add_339/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_100" + op: "RealDiv" + input: "add_337" + input: "add_339" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_540" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_100" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_435" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_99" + op: "Sub" + input: "ReadVariableOp_435" + input: "mul_540" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_446" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + input: "sub_99" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_436" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + input: "^AssignVariableOp_446" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_447" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + input: "add_337" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_437" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + input: "^AssignVariableOp_447" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_448" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + input: "add_338" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_438" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + input: "^AssignVariableOp_448" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value 
{ + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_541/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_541/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_541" + op: "Mul" + input: "Mul_541/x" + input: "Mul_541/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_542/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 
0.10000000149011612 + } + } + } +} +node { + name: "Mul_542" + op: "Mul" + input: "Mul_542/x" + input: "clip_by_global_norm/clip_by_global_norm/_100" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_340" + op: "AddV2" + input: "Mul_541" + input: "Mul_542" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_543/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_543/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_543" + op: "Mul" + input: "Mul_543/x" + input: "Mul_543/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_100" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_100" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_544/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_544" + op: "Mul" + input: "Mul_544/x" + input: "Square_100" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_341" + op: "AddV2" + input: "Mul_543" + input: "Mul_544" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_100" + op: "Sqrt" + input: "add_341" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_342/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_342" + op: "AddV2" + input: "Sqrt_100" + input: "add_342/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_101" + op: "RealDiv" + input: "add_340" + input: "add_342" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_545" + op: "Mul" + input: "PolynomialDecay" + 
input: "truediv_101" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_439" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_100" + op: "Sub" + input: "ReadVariableOp_439" + input: "mul_545" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_449" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + input: "sub_100" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_440" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + input: "^AssignVariableOp_449" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_450" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + input: "add_340" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_441" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + input: "^AssignVariableOp_450" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_451" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + input: "add_341" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_442" + op: "ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + input: "^AssignVariableOp_451" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_6/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_6/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_546/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_546/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_546" + op: "Mul" + input: "Mul_546/x" + input: "Mul_546/ReadVariableOp" + attr { + key: "T" + value 
{ + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_547/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_547" + op: "Mul" + input: "Mul_547/x" + input: "clip_by_global_norm/clip_by_global_norm/_101" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_343" + op: "AddV2" + input: "Mul_546" + input: "Mul_547" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_548/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_548/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_548" + op: "Mul" + input: "Mul_548/x" + input: "Mul_548/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_101" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_101" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_549/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_549" + op: "Mul" + input: "Mul_549/x" + input: "Square_101" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_344" + op: "AddV2" + input: "Mul_548" + input: "Mul_549" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_101" + op: "Sqrt" + input: "add_344" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_345/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + 
tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_345" + op: "AddV2" + input: "Sqrt_101" + input: "add_345/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_102" + op: "RealDiv" + input: "add_343" + input: "add_345" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_443" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_550/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_550" + op: "Mul" + input: "mul_550/x" + input: "ReadVariableOp_443" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_346" + op: "AddV2" + input: "truediv_102" + input: "mul_550" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_551" + op: "Mul" + input: "PolynomialDecay" + input: "add_346" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_444" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_101" + op: "Sub" + input: "ReadVariableOp_444" + input: "mul_551" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_452" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + input: "sub_101" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_445" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + input: "^AssignVariableOp_452" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_453" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + input: "add_343" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value 
{ + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_446" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_453" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_454" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + input: "add_344" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_447" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_454" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + 
s: "loc:@bert/encoder/layer_6/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_552/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_552/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_552" + op: "Mul" + input: "Mul_552/x" + input: "Mul_552/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_553/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_553" + op: "Mul" + input: "Mul_553/x" + input: "clip_by_global_norm/clip_by_global_norm/_102" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } 
+} +node { + name: "add_347" + op: "AddV2" + input: "Mul_552" + input: "Mul_553" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_554/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_554/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_554" + op: "Mul" + input: "Mul_554/x" + input: "Mul_554/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_102" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_102" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_555/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_555" + op: "Mul" + input: "Mul_555/x" + input: "Square_102" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_348" + op: "AddV2" + input: "Mul_554" + input: "Mul_555" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_102" + op: "Sqrt" + input: "add_348" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_349/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_349" + op: "AddV2" + input: "Sqrt_102" + input: "add_349/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_103" + op: "RealDiv" + input: "add_347" + input: "add_349" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_556" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_103" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_448" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_102" + op: "Sub" + input: "ReadVariableOp_448" + input: "mul_556" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_455" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + input: "sub_102" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_449" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + input: "^AssignVariableOp_455" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_456" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + input: "add_347" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_450" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_456" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_457" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + input: "add_348" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_451" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_457" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/Initializer/zeros" + op: 
"Fill" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: 
"bert/encoder/layer_6/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_557/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_557/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_557" + op: "Mul" + input: "Mul_557/x" + input: "Mul_557/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_558/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} 
+node { + name: "Mul_558" + op: "Mul" + input: "Mul_558/x" + input: "clip_by_global_norm/clip_by_global_norm/_103" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_350" + op: "AddV2" + input: "Mul_557" + input: "Mul_558" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_559/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_559/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_559" + op: "Mul" + input: "Mul_559/x" + input: "Mul_559/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_103" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_103" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_560/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_560" + op: "Mul" + input: "Mul_560/x" + input: "Square_103" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_351" + op: "AddV2" + input: "Mul_559" + input: "Mul_560" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_103" + op: "Sqrt" + input: "add_351" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_352/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_352" + op: "AddV2" + input: "Sqrt_103" + input: "add_352/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_104" + op: "RealDiv" + input: "add_350" + input: "add_352" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_452" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_561/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_561" + op: "Mul" + input: "mul_561/x" + input: "ReadVariableOp_452" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_353" + op: "AddV2" + input: "truediv_104" + input: "mul_561" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_562" + op: "Mul" + input: "PolynomialDecay" + input: "add_353" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_453" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_103" + op: "Sub" + input: "ReadVariableOp_453" + input: "mul_562" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_458" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + input: "sub_103" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_454" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + input: "^AssignVariableOp_458" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_459" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + input: "add_350" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_455" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_459" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_460" + op: 
"AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + input: "add_351" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_456" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_460" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_6/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_563/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_563/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_563" + op: "Mul" + input: "Mul_563/x" + input: "Mul_563/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_564/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_564" + op: "Mul" + input: "Mul_564/x" + input: "clip_by_global_norm/clip_by_global_norm/_104" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_354" + op: "AddV2" + input: "Mul_563" + input: "Mul_564" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_565/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + 
} + } + } +} +node { + name: "Mul_565/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_565" + op: "Mul" + input: "Mul_565/x" + input: "Mul_565/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_104" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_104" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_566/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_566" + op: "Mul" + input: "Mul_566/x" + input: "Square_104" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_355" + op: "AddV2" + input: "Mul_565" + input: "Mul_566" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_104" + op: "Sqrt" + input: "add_355" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_356/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_356" + op: "AddV2" + input: "Sqrt_104" + input: "add_356/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_105" + op: "RealDiv" + input: "add_354" + input: "add_356" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_567" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_105" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_457" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_104" + op: "Sub" + input: "ReadVariableOp_457" + input: "mul_567" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_461" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + input: "sub_104" + attr { + 
key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_458" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + input: "^AssignVariableOp_461" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_462" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + input: "add_354" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_459" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_462" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_463" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + input: "add_355" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_460" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_463" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + 
type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: 
DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_568/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_568/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_568" + op: "Mul" + input: "Mul_568/x" + input: "Mul_568/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_569/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_569" + op: "Mul" + input: "Mul_569/x" + input: "clip_by_global_norm/clip_by_global_norm/_105" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_357" + op: "AddV2" + input: "Mul_568" + input: "Mul_569" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 
768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_570/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_570/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_570" + op: "Mul" + input: "Mul_570/x" + input: "Mul_570/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_105" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_105" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_571/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_571" + op: "Mul" + input: "Mul_571/x" + input: "Square_105" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_358" + op: "AddV2" + input: "Mul_570" + input: "Mul_571" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_105" + op: "Sqrt" + input: "add_358" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_359/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_359" + op: "AddV2" + input: "Sqrt_105" + input: "add_359/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_106" + op: "RealDiv" + input: "add_357" + input: "add_359" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_461" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_572/x" + op: "Const" + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_572" + op: "Mul" + input: "mul_572/x" + input: "ReadVariableOp_461" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_360" + op: "AddV2" + input: "truediv_106" + input: "mul_572" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_573" + op: "Mul" + input: "PolynomialDecay" + input: "add_360" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_462" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_105" + op: "Sub" + input: "ReadVariableOp_462" + input: "mul_573" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_464" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + input: "sub_105" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_463" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + input: "^AssignVariableOp_464" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_465" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + input: "add_357" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_464" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_465" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_466" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + input: "add_358" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_465" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_466" + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: 
"bert/encoder/layer_6/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_574/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_574/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_574" + op: "Mul" + input: "Mul_574/x" + input: "Mul_574/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_575/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_575" + op: "Mul" + input: "Mul_575/x" + input: "clip_by_global_norm/clip_by_global_norm/_106" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_361" + op: "AddV2" + input: "Mul_574" + input: "Mul_575" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_576/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_576/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_576" + op: "Mul" + input: "Mul_576/x" + input: "Mul_576/ReadVariableOp" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_106" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_106" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_577/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_577" + op: "Mul" + input: "Mul_577/x" + input: "Square_106" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_362" + op: "AddV2" + input: "Mul_576" + input: "Mul_577" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_106" + op: "Sqrt" + input: "add_362" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_363/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_363" + op: "AddV2" + input: "Sqrt_106" + input: "add_363/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_107" + op: "RealDiv" + input: "add_361" + input: "add_363" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_578" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_107" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_466" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_106" + op: "Sub" + input: "ReadVariableOp_466" + input: "mul_578" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_467" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + input: "sub_106" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_467" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + input: "^AssignVariableOp_467" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } 
+ } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_468" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + input: "add_361" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_468" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_468" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_469" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + input: "add_362" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_469" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_469" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + 
key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list 
{ + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_579/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_579/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_579" + op: "Mul" + input: "Mul_579/x" + input: "Mul_579/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_580/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_580" + op: "Mul" + input: "Mul_580/x" + input: "clip_by_global_norm/clip_by_global_norm/_107" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_364" + op: "AddV2" + input: "Mul_579" + input: "Mul_580" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_581/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } 
+ } + } +} +node { + name: "Mul_581/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_581" + op: "Mul" + input: "Mul_581/x" + input: "Mul_581/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_107" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_107" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_582/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_582" + op: "Mul" + input: "Mul_582/x" + input: "Square_107" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_365" + op: "AddV2" + input: "Mul_581" + input: "Mul_582" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_107" + op: "Sqrt" + input: "add_365" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_366/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_366" + op: "AddV2" + input: "Sqrt_107" + input: "add_366/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_108" + op: "RealDiv" + input: "add_364" + input: "add_366" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_470" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_583/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_583" + op: "Mul" + input: "mul_583/x" + input: "ReadVariableOp_470" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_367" + op: "AddV2" + input: "truediv_108" + input: "mul_583" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_584" + op: "Mul" + input: "PolynomialDecay" + input: "add_367" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_471" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_107" + op: "Sub" + input: "ReadVariableOp_471" + input: "mul_584" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_470" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + input: "sub_107" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_472" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + input: "^AssignVariableOp_470" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_471" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + input: "add_364" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_473" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_471" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_472" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + input: "add_365" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_474" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_472" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_6/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_585/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_585/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_585" + op: "Mul" + input: "Mul_585/x" + input: "Mul_585/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_586/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_586" + op: "Mul" + input: "Mul_586/x" + input: "clip_by_global_norm/clip_by_global_norm/_108" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_368" + op: "AddV2" + input: "Mul_585" + input: "Mul_586" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_587/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_587/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_587" + op: "Mul" + input: "Mul_587/x" + input: "Mul_587/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_108" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_108" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_588/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_588" + op: "Mul" + input: "Mul_588/x" + input: "Square_108" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_369" + op: "AddV2" + input: "Mul_587" + input: "Mul_588" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_108" + op: "Sqrt" + input: "add_369" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_370/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_370" + op: "AddV2" + input: "Sqrt_108" + input: "add_370/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_109" + op: "RealDiv" + input: "add_368" + input: "add_370" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_589" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_109" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_475" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_108" + op: "Sub" + input: "ReadVariableOp_475" + input: "mul_589" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_473" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + input: "sub_108" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_476" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + input: "^AssignVariableOp_473" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_474" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + input: "add_368" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_477" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_474" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_475" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + input: "add_369" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_478" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_475" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_590/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_590/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_590" + op: "Mul" + input: "Mul_590/x" + input: "Mul_590/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"Mul_591/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_591" + op: "Mul" + input: "Mul_591/x" + input: "clip_by_global_norm/clip_by_global_norm/_109" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_371" + op: "AddV2" + input: "Mul_590" + input: "Mul_591" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_592/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_592/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_592" + op: "Mul" + input: "Mul_592/x" + input: "Mul_592/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_109" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_109" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_593/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_593" + op: "Mul" + input: "Mul_593/x" + input: "Square_109" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_372" + op: "AddV2" + input: "Mul_592" + input: "Mul_593" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_109" + op: "Sqrt" + input: "add_372" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_373/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_373" + op: "AddV2" + input: "Sqrt_109" + input: "add_373/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_110" + op: "RealDiv" 
+ input: "add_371" + input: "add_373" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_594" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_110" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_479" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_109" + op: "Sub" + input: "ReadVariableOp_479" + input: "mul_594" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_476" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + input: "sub_109" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_480" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + input: "^AssignVariableOp_476" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_477" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + input: "add_371" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_481" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + input: "^AssignVariableOp_477" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_478" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + input: "add_372" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_482" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + input: "^AssignVariableOp_478" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + 
key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_595/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_595/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_595" + op: "Mul" + input: "Mul_595/x" + input: "Mul_595/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_596/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_596" + op: "Mul" + input: "Mul_596/x" + input: "clip_by_global_norm/clip_by_global_norm/_110" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_374" + op: "AddV2" + input: "Mul_595" + input: "Mul_596" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_597/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_597/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_597" + op: "Mul" + input: "Mul_597/x" + input: "Mul_597/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_110" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_110" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_598/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_598" + op: "Mul" + input: "Mul_598/x" + input: "Square_110" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_375" + op: "AddV2" + input: "Mul_597" + input: "Mul_598" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_110" + op: "Sqrt" + input: "add_375" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_376/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_376" + op: "AddV2" + input: "Sqrt_110" + input: "add_376/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_111" + op: "RealDiv" + input: "add_374" + input: "add_376" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_599" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_111" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_483" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_110" + op: "Sub" + input: "ReadVariableOp_483" + input: "mul_599" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_479" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + input: "sub_110" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_484" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + input: "^AssignVariableOp_479" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + 
} + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_480" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + input: "add_374" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_485" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + input: "^AssignVariableOp_480" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_481" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + input: "add_375" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_486" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + input: "^AssignVariableOp_481" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { 
+ list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: 
"container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_600/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_600/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_600" + op: "Mul" + input: "Mul_600/x" + input: "Mul_600/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_601/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_601" + op: "Mul" + input: "Mul_601/x" + input: "clip_by_global_norm/clip_by_global_norm/_111" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_377" + op: "AddV2" + input: "Mul_600" + input: "Mul_601" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_602/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_602/ReadVariableOp" + op: 
"ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_602" + op: "Mul" + input: "Mul_602/x" + input: "Mul_602/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_111" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_111" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_603/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_603" + op: "Mul" + input: "Mul_603/x" + input: "Square_111" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_378" + op: "AddV2" + input: "Mul_602" + input: "Mul_603" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_111" + op: "Sqrt" + input: "add_378" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_379/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_379" + op: "AddV2" + input: "Sqrt_111" + input: "add_379/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_112" + op: "RealDiv" + input: "add_377" + input: "add_379" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_487" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_604/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_604" + op: "Mul" + input: "mul_604/x" + input: "ReadVariableOp_487" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_380" + op: "AddV2" + input: "truediv_112" + input: "mul_604" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_605" + op: "Mul" + input: "PolynomialDecay" + input: "add_380" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_488" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_111" + op: "Sub" + input: "ReadVariableOp_488" + input: "mul_605" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_482" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + input: "sub_111" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_489" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + input: "^AssignVariableOp_482" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_483" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + input: "add_377" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_490" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_483" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_484" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + input: "add_378" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_491" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_484" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias/adam_m" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_606/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_606/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"Mul_606" + op: "Mul" + input: "Mul_606/x" + input: "Mul_606/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_607/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_607" + op: "Mul" + input: "Mul_607/x" + input: "clip_by_global_norm/clip_by_global_norm/_112" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_381" + op: "AddV2" + input: "Mul_606" + input: "Mul_607" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_608/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_608/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_608" + op: "Mul" + input: "Mul_608/x" + input: "Mul_608/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_112" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_112" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_609/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_609" + op: "Mul" + input: "Mul_609/x" + input: "Square_112" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_382" + op: "AddV2" + input: "Mul_608" + input: "Mul_609" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_112" + op: "Sqrt" + input: "add_382" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_383/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_383" + op: "AddV2" + 
input: "Sqrt_112" + input: "add_383/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_113" + op: "RealDiv" + input: "add_381" + input: "add_383" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_610" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_113" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_492" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_112" + op: "Sub" + input: "ReadVariableOp_492" + input: "mul_610" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_485" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + input: "sub_112" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_493" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + input: "^AssignVariableOp_485" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_486" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + input: "add_381" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_494" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_486" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_487" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + input: "add_382" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_495" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_487" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { 
+ key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } 
+ } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_611/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_611/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"Mul_611" + op: "Mul" + input: "Mul_611/x" + input: "Mul_611/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_612/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_612" + op: "Mul" + input: "Mul_612/x" + input: "clip_by_global_norm/clip_by_global_norm/_113" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_384" + op: "AddV2" + input: "Mul_611" + input: "Mul_612" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_613/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_613/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_613" + op: "Mul" + input: "Mul_613/x" + input: "Mul_613/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_113" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_113" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_614/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_614" + op: "Mul" + input: "Mul_614/x" + input: "Square_113" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_385" + op: "AddV2" + input: "Mul_613" + input: "Mul_614" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_113" + op: "Sqrt" + input: "add_385" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_386/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_386" + op: "AddV2" + input: "Sqrt_113" + input: "add_386/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_114" + op: "RealDiv" + input: "add_384" + input: "add_386" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_496" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_615/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_615" + op: "Mul" + input: "mul_615/x" + input: "ReadVariableOp_496" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_387" + op: "AddV2" + input: "truediv_114" + input: "mul_615" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_616" + op: "Mul" + input: "PolynomialDecay" + input: "add_387" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_497" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_113" + op: "Sub" + input: "ReadVariableOp_497" + input: "mul_616" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_488" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel" + input: "sub_113" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_498" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel" + input: "^AssignVariableOp_488" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_489" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m" + input: "add_384" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_499" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m" + input: "^AssignVariableOp_489" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_490" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v" + input: "add_385" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_500" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v" + input: "^AssignVariableOp_490" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m" + input: "bert/encoder/layer_6/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_6/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v" + input: "bert/encoder/layer_6/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_617/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_617/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_617" + op: "Mul" + input: "Mul_617/x" + input: "Mul_617/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_618/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_618" + op: "Mul" + input: "Mul_618/x" + input: "clip_by_global_norm/clip_by_global_norm/_114" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_388" + op: "AddV2" + input: "Mul_617" + input: "Mul_618" + attr { + key: 
"T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_619/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_619/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_619" + op: "Mul" + input: "Mul_619/x" + input: "Mul_619/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_114" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_114" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_620/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_620" + op: "Mul" + input: "Mul_620/x" + input: "Square_114" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_389" + op: "AddV2" + input: "Mul_619" + input: "Mul_620" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_114" + op: "Sqrt" + input: "add_389" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_390/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_390" + op: "AddV2" + input: "Sqrt_114" + input: "add_390/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_115" + op: "RealDiv" + input: "add_388" + input: "add_390" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_621" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_115" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_501" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { 
+ type: DT_FLOAT + } + } +} +node { + name: "sub_114" + op: "Sub" + input: "ReadVariableOp_501" + input: "mul_621" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_491" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias" + input: "sub_114" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_502" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias" + input: "^AssignVariableOp_491" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_492" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m" + input: "add_388" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_503" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m" + input: "^AssignVariableOp_492" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_493" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v" + input: "add_389" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_504" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v" + input: "^AssignVariableOp_493" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + } + } +} +node { + name: 
"bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_622/x" + op: 
"Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_622/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_622" + op: "Mul" + input: "Mul_622/x" + input: "Mul_622/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_623/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_623" + op: "Mul" + input: "Mul_623/x" + input: "clip_by_global_norm/clip_by_global_norm/_115" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_391" + op: "AddV2" + input: "Mul_622" + input: "Mul_623" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_624/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_624/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_624" + op: "Mul" + input: "Mul_624/x" + input: "Mul_624/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_115" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_115" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_625/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_625" + op: "Mul" + input: "Mul_625/x" + input: "Square_115" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_392" + op: "AddV2" + input: "Mul_624" + input: "Mul_625" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } 
+ } + } + } + } +} +node { + name: "Sqrt_115" + op: "Sqrt" + input: "add_392" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_393/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_393" + op: "AddV2" + input: "Sqrt_115" + input: "add_393/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_116" + op: "RealDiv" + input: "add_391" + input: "add_393" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_626" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_116" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_505" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_115" + op: "Sub" + input: "ReadVariableOp_505" + input: "mul_626" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_494" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + input: "sub_115" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_506" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + input: "^AssignVariableOp_494" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_495" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + input: "add_391" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_507" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + input: "^AssignVariableOp_495" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_496" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + input: "add_392" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} 
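+# Descriptive note on the surrounding nodes (an editorial comment; the exporter itself does not
+# emit comments, and the optimizer identity is inferred from the node structure): every trainable
+# variable in this graph gets the same repeated update subgraph, consistent with BERT's
+# Adam-with-weight-decay optimizer. With g = clip_by_global_norm/clip_by_global_norm/_<i>
+# (the globally clipped gradient) and lr = PolynomialDecay, the pattern is:
+#   m <- 0.9*m + 0.1*g                        (e.g. Mul_622, Mul_623, add_391)
+#   v <- 0.999*v + 0.001*g^2                  (e.g. Mul_624, Square_115, Mul_625, add_392)
+#   u  = m / (sqrt(v) + 1e-6)                 (Sqrt_*, add_*/y, truediv_*)
+#   u += 0.01*theta  for kernel variables only (e.g. mul_636/mul_647-style weight decay)
+#   theta <- theta - lr*u                      (mul_*, sub_*, AssignVariableOp_*)
+# The trailing AssignVariableOp_*/ReadVariableOp_* pairs write the new m, v, and theta back
+# into the corresponding bert/encoder/... variables.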
+node { + name: "ReadVariableOp_508" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + input: "^AssignVariableOp_496" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + 
} + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_627/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_627/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_627" + op: "Mul" + input: "Mul_627/x" + input: "Mul_627/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_628/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_628" + op: "Mul" + input: "Mul_628/x" + input: "clip_by_global_norm/clip_by_global_norm/_116" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_394" + op: "AddV2" + input: "Mul_627" + input: "Mul_628" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_629/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: 
"Mul_629/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_629" + op: "Mul" + input: "Mul_629/x" + input: "Mul_629/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_116" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_116" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_630/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_630" + op: "Mul" + input: "Mul_630/x" + input: "Square_116" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_395" + op: "AddV2" + input: "Mul_629" + input: "Mul_630" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_116" + op: "Sqrt" + input: "add_395" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_396/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_396" + op: "AddV2" + input: "Sqrt_116" + input: "add_396/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_117" + op: "RealDiv" + input: "add_394" + input: "add_396" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_631" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_117" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_509" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_116" + op: "Sub" + input: "ReadVariableOp_509" + input: "mul_631" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_497" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + input: "sub_116" + attr { + 
key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_510" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + input: "^AssignVariableOp_497" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_498" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + input: "add_394" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_511" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + input: "^AssignVariableOp_498" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_499" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + input: "add_395" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_512" + op: "ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + input: "^AssignVariableOp_499" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + 
} + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } 
+ attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_632/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_632/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_632" + op: "Mul" + input: "Mul_632/x" + input: "Mul_632/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_633/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_633" + op: "Mul" + input: "Mul_633/x" + input: "clip_by_global_norm/clip_by_global_norm/_117" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_397" + op: "AddV2" + input: "Mul_632" + input: "Mul_633" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_634/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_634/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_634" + op: "Mul" + input: "Mul_634/x" + input: "Mul_634/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_117" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_117" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_635/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_635" + op: "Mul" + input: "Mul_635/x" + input: "Square_117" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_398" + op: "AddV2" + input: "Mul_634" + input: "Mul_635" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_117" + op: "Sqrt" + input: "add_398" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_399/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_399" + op: "AddV2" + input: "Sqrt_117" + input: "add_399/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_118" + op: "RealDiv" + input: "add_397" + input: "add_399" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_513" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + 
name: "mul_636/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_636" + op: "Mul" + input: "mul_636/x" + input: "ReadVariableOp_513" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_400" + op: "AddV2" + input: "truediv_118" + input: "mul_636" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_637" + op: "Mul" + input: "PolynomialDecay" + input: "add_400" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_514" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_117" + op: "Sub" + input: "ReadVariableOp_514" + input: "mul_637" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_500" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + input: "sub_117" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_515" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + input: "^AssignVariableOp_500" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_501" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + input: "add_397" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_516" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_501" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_502" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + input: "add_398" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_517" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + 
input: "^AssignVariableOp_502" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } 
+ } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_638/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_638/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_638" + op: "Mul" + input: "Mul_638/x" + input: "Mul_638/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_639/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_639" + op: "Mul" + input: "Mul_639/x" + input: "clip_by_global_norm/clip_by_global_norm/_118" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_401" + op: "AddV2" + input: "Mul_638" + input: "Mul_639" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_640/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_640/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_640" + op: "Mul" + input: "Mul_640/x" + input: 
"Mul_640/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_118" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_118" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_641/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_641" + op: "Mul" + input: "Mul_641/x" + input: "Square_118" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_402" + op: "AddV2" + input: "Mul_640" + input: "Mul_641" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_118" + op: "Sqrt" + input: "add_402" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_403/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_403" + op: "AddV2" + input: "Sqrt_118" + input: "add_403/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_119" + op: "RealDiv" + input: "add_401" + input: "add_403" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_642" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_119" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_518" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_118" + op: "Sub" + input: "ReadVariableOp_518" + input: "mul_642" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_503" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + input: "sub_118" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_519" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + input: "^AssignVariableOp_503" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_504" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + input: "add_401" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_520" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_504" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_505" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + input: "add_402" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_521" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_505" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + 
list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" 
+ value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_643/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_643/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_643" + op: "Mul" + input: "Mul_643/x" + input: "Mul_643/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_644/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_644" + op: "Mul" + input: "Mul_644/x" + input: "clip_by_global_norm/clip_by_global_norm/_119" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_404" + op: "AddV2" + input: "Mul_643" + input: "Mul_644" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_645/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_645/ReadVariableOp" + op: "ReadVariableOp" + 
input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_645" + op: "Mul" + input: "Mul_645/x" + input: "Mul_645/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_119" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_119" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_646/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_646" + op: "Mul" + input: "Mul_646/x" + input: "Square_119" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_405" + op: "AddV2" + input: "Mul_645" + input: "Mul_646" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_119" + op: "Sqrt" + input: "add_405" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_406/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_406" + op: "AddV2" + input: "Sqrt_119" + input: "add_406/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_120" + op: "RealDiv" + input: "add_404" + input: "add_406" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_522" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_647/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_647" + op: "Mul" + input: "mul_647/x" + input: "ReadVariableOp_522" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { 
+ shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_407" + op: "AddV2" + input: "truediv_120" + input: "mul_647" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_648" + op: "Mul" + input: "PolynomialDecay" + input: "add_407" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_523" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_119" + op: "Sub" + input: "ReadVariableOp_523" + input: "mul_648" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_506" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + input: "sub_119" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_524" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + input: "^AssignVariableOp_506" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_507" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + input: "add_404" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_525" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_507" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_508" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + input: "add_405" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_526" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_508" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { 
+ size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_7/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_649/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_649/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_649" + op: "Mul" + input: "Mul_649/x" + input: "Mul_649/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_650/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_650" + op: "Mul" + input: "Mul_650/x" + input: "clip_by_global_norm/clip_by_global_norm/_120" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_408" + op: "AddV2" + input: "Mul_649" + input: "Mul_650" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_651/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_651/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_651" + op: "Mul" + input: "Mul_651/x" + input: "Mul_651/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_120" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_120" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_652/x" + op: "Const" + attr { + key: "_output_shapes" + value { 
+ list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_652" + op: "Mul" + input: "Mul_652/x" + input: "Square_120" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_409" + op: "AddV2" + input: "Mul_651" + input: "Mul_652" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_120" + op: "Sqrt" + input: "add_409" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_410/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_410" + op: "AddV2" + input: "Sqrt_120" + input: "add_410/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_121" + op: "RealDiv" + input: "add_408" + input: "add_410" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_653" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_121" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_527" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_120" + op: "Sub" + input: "ReadVariableOp_527" + input: "mul_653" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_509" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + input: "sub_120" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_528" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + input: "^AssignVariableOp_509" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_510" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + input: "add_408" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_529" + op: 
"ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_510" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_511" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + input: "add_409" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_530" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_511" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_7/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_7/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_654/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_654/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_654" + op: "Mul" + input: "Mul_654/x" + input: "Mul_654/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_655/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_655" + op: "Mul" + input: "Mul_655/x" + input: "clip_by_global_norm/clip_by_global_norm/_121" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_411" + op: "AddV2" + input: "Mul_654" + input: "Mul_655" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_656/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_656/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_656" + op: "Mul" + input: "Mul_656/x" + input: "Mul_656/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_121" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_121" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_657/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_657" + op: "Mul" + input: "Mul_657/x" + input: "Square_121" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_412" + op: "AddV2" + input: "Mul_656" + input: "Mul_657" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_121" + op: "Sqrt" + input: "add_412" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_413/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_413" + op: "AddV2" + input: "Sqrt_121" + input: "add_413/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_122" + op: "RealDiv" + input: "add_411" + input: "add_413" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_531" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_658/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_658" + op: "Mul" + input: "mul_658/x" + input: "ReadVariableOp_531" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_414" + op: "AddV2" + input: "truediv_122" + input: "mul_658" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_659" + op: "Mul" + input: "PolynomialDecay" + 
input: "add_414" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_532" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_121" + op: "Sub" + input: "ReadVariableOp_532" + input: "mul_659" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_512" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + input: "sub_121" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_533" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + input: "^AssignVariableOp_512" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_513" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + input: "add_411" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_534" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_513" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_514" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + input: "add_412" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_535" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_514" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_7/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_7/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_660/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_660/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_660" + op: "Mul" + input: "Mul_660/x" + input: "Mul_660/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_661/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_661" + op: "Mul" + input: "Mul_661/x" + input: "clip_by_global_norm/clip_by_global_norm/_122" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_415" + op: "AddV2" + input: "Mul_660" + input: "Mul_661" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_662/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_662/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_662" + op: "Mul" + input: "Mul_662/x" + input: "Mul_662/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_122" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_122" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_663/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_663" + op: "Mul" + input: "Mul_663/x" + input: "Square_122" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_416" + op: "AddV2" + input: "Mul_662" + input: "Mul_663" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_122" + op: "Sqrt" + input: "add_416" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_417/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_417" + op: "AddV2" + input: "Sqrt_122" + input: "add_417/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_123" + op: "RealDiv" + input: "add_415" + input: "add_417" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_664" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_123" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_536" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_122" + op: "Sub" + input: "ReadVariableOp_536" + input: "mul_664" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_515" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + input: "sub_122" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_537" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + input: "^AssignVariableOp_515" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_516" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + input: "add_415" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_538" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_516" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_517" + op: "AssignVariableOp" 
+ input: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + input: "add_416" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_539" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_517" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + input: 
"bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_665/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_665/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_665" + op: "Mul" + input: "Mul_665/x" + input: "Mul_665/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_666/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_666" + op: "Mul" + input: "Mul_666/x" + input: "clip_by_global_norm/clip_by_global_norm/_123" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_418" + op: "AddV2" + input: "Mul_665" + input: "Mul_666" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_667/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_667/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_667" + op: "Mul" + input: "Mul_667/x" + input: "Mul_667/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_123" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_123" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_668/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_668" + op: "Mul" + input: "Mul_668/x" + input: "Square_123" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_419" + op: "AddV2" + input: "Mul_667" + input: "Mul_668" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_123" + op: "Sqrt" + input: "add_419" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_420/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_420" + op: "AddV2" + input: "Sqrt_123" + input: "add_420/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_124" + op: "RealDiv" + input: "add_418" + input: "add_420" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_540" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_669/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_669" + op: "Mul" + input: "mul_669/x" + input: "ReadVariableOp_540" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_421" + op: "AddV2" + input: "truediv_124" + input: "mul_669" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_670" + op: "Mul" + input: "PolynomialDecay" + input: "add_421" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_541" + op: 
"ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_123" + op: "Sub" + input: "ReadVariableOp_541" + input: "mul_670" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_518" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + input: "sub_123" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_542" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + input: "^AssignVariableOp_518" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_519" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + input: "add_418" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_543" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_519" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_520" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + input: "add_419" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_544" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_520" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: 
"container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + 
list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_671/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_671/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_671" + op: "Mul" + input: "Mul_671/x" + input: "Mul_671/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_672/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_672" + op: "Mul" + input: "Mul_672/x" + input: "clip_by_global_norm/clip_by_global_norm/_124" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_422" + op: "AddV2" + input: "Mul_671" + input: "Mul_672" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_673/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_673/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_673" + op: "Mul" + input: "Mul_673/x" + input: "Mul_673/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_124" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_124" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_674/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_674" + op: "Mul" + input: "Mul_674/x" + input: "Square_124" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_423" + op: "AddV2" + input: "Mul_673" + input: "Mul_674" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_124" + op: "Sqrt" + input: "add_423" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_424/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_424" + op: "AddV2" + input: "Sqrt_124" + input: "add_424/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_125" + op: "RealDiv" + input: "add_422" + input: "add_424" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_675" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_125" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_545" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_124" + op: "Sub" + input: "ReadVariableOp_545" + input: "mul_675" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_521" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + input: "sub_124" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_546" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + input: "^AssignVariableOp_521" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_522" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + input: "add_422" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_547" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_522" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_523" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + input: "add_423" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { 
+ type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_548" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_523" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: 
"bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_676/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_676/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_676" + op: "Mul" + input: "Mul_676/x" + input: "Mul_676/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_677/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_677" + op: "Mul" + input: "Mul_677/x" + input: "clip_by_global_norm/clip_by_global_norm/_125" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_425" + op: "AddV2" + input: "Mul_676" + input: "Mul_677" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_678/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_678/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_678" + op: "Mul" + input: "Mul_678/x" + input: "Mul_678/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_125" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_125" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_679/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_679" + op: "Mul" + input: "Mul_679/x" + input: "Square_125" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_426" + op: "AddV2" + input: "Mul_678" + input: "Mul_679" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_125" + op: "Sqrt" + input: "add_426" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_427/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_427" + op: "AddV2" + input: "Sqrt_125" + input: "add_427/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_126" + op: "RealDiv" + input: "add_425" + input: "add_427" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_680" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_126" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_549" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT 
+ } + } +} +node { + name: "sub_125" + op: "Sub" + input: "ReadVariableOp_549" + input: "mul_680" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_524" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + input: "sub_125" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_550" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + input: "^AssignVariableOp_524" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_525" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + input: "add_425" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_551" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + input: "^AssignVariableOp_525" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_526" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + input: "add_426" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_552" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + input: "^AssignVariableOp_526" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: 
"shared_name" + value { + s: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v/Read/ReadVariableOp" + op: 
"ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_681/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_681/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_681" + op: "Mul" + input: "Mul_681/x" + input: "Mul_681/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_682/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_682" + op: "Mul" + input: "Mul_682/x" + input: "clip_by_global_norm/clip_by_global_norm/_126" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_428" + op: "AddV2" + input: "Mul_681" + input: "Mul_682" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_683/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_683/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_683" + op: "Mul" + input: "Mul_683/x" + input: "Mul_683/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_126" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_126" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_684/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_684" + op: "Mul" + input: "Mul_684/x" + input: "Square_126" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr 
{ + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_429" + op: "AddV2" + input: "Mul_683" + input: "Mul_684" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_126" + op: "Sqrt" + input: "add_429" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_430/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_430" + op: "AddV2" + input: "Sqrt_126" + input: "add_430/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_127" + op: "RealDiv" + input: "add_428" + input: "add_430" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_685" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_127" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_553" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_126" + op: "Sub" + input: "ReadVariableOp_553" + input: "mul_685" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_527" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + input: "sub_126" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_554" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + input: "^AssignVariableOp_527" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_528" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + input: "add_428" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_555" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + input: "^AssignVariableOp_528" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} 
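Editor's note (not part of the checked-in graph.pbtxt): the node pattern that repeats above for each variable — Mul_*/add_* on the adam_m slot, Square_*/Mul_*/add_* on the adam_v slot, Sqrt_*/add_*/truediv_*, an optional 0.01*param term, then mul with PolynomialDecay, Sub, and AssignVariableOp — is the unrolled BERT-style Adam update with weight decay applied to the clip_by_global_norm outputs. A minimal sketch of that per-variable update, with the constants read off the Const nodes in this section (0.9/0.1, 0.999/0.001, 1e-6, 0.01); the function name adamw_step and the NumPy usage are illustrative assumptions, nothing defined in this repository:

import numpy as np

def adamw_step(param, grad, m, v, lr,
               beta1=0.9, beta2=0.999, eps=1e-6,
               weight_decay=0.01, apply_decay=True):
    """One update as encoded by the generated nodes:
    m      <- 0.9*m + 0.1*grad              (Mul_*/x = 0.9, 0.1; add_*)
    v      <- 0.999*v + 0.001*grad**2       (Square_*, Mul_*/x = 0.999, 0.001)
    update <- m / (sqrt(v) + 1e-6)          (Sqrt_*, add_*/y = 1e-6, truediv_*)
    update <- update + 0.01*param           (mul_*/x = 0.01; dense kernels only)
    param  <- param - lr*update             (lr = PolynomialDecay; mul_*, sub_*)
    """
    m = beta1 * m + (1.0 - beta1) * grad
    v = beta2 * v + (1.0 - beta2) * grad**2
    update = m / (np.sqrt(v) + eps)
    if apply_decay:                       # kernels take the decay term (add_421/add_434);
        update += weight_decay * param    # biases and layer-norm gamma/beta skip it (mul_675, mul_680)
    param = param - lr * update
    return param, m, v                    # param, adam_m, adam_v written back via AssignVariableOp

Note that, as in the graph, neither moment is bias-corrected, and whether the 0.01*param decay term is added depends on the variable: the dense kernels above include it, while the bias and layer_normalization_15 gamma/beta updates multiply PolynomialDecay by truediv_* directly.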
+node { + name: "AssignVariableOp_529" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + input: "add_429" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_556" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + input: "^AssignVariableOp_529" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_686/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_686/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_686" + op: "Mul" + input: "Mul_686/x" + input: "Mul_686/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_687/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_687" + op: "Mul" + input: "Mul_687/x" + input: "clip_by_global_norm/clip_by_global_norm/_127" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_431" + op: "AddV2" + input: "Mul_686" + input: "Mul_687" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_688/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_688/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_688" + op: "Mul" + input: "Mul_688/x" + input: "Mul_688/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_127" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_127" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_689/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_689" + op: "Mul" + input: "Mul_689/x" + input: "Square_127" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_432" + op: "AddV2" + input: "Mul_688" + input: "Mul_689" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_127" + op: "Sqrt" + input: "add_432" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_433/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_433" + op: "AddV2" + input: "Sqrt_127" + input: "add_433/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_128" + op: "RealDiv" + input: "add_431" + input: "add_433" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_557" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_690/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_690" + op: "Mul" + input: "mul_690/x" + input: "ReadVariableOp_557" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_434" + op: "AddV2" + input: "truediv_128" + input: "mul_690" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_691" + op: "Mul" + input: "PolynomialDecay" + input: "add_434" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_558" + op: "ReadVariableOp" + 
input: "bert/encoder/layer_7/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_127" + op: "Sub" + input: "ReadVariableOp_558" + input: "mul_691" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_530" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + input: "sub_127" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_559" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + input: "^AssignVariableOp_530" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_531" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + input: "add_431" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_560" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_531" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_532" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + input: "add_432" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_561" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_532" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: 
DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: 
"bert/encoder/layer_7/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_692/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_692/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_692" + op: "Mul" + input: "Mul_692/x" + input: "Mul_692/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_693/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_693" + op: "Mul" + input: "Mul_693/x" + input: "clip_by_global_norm/clip_by_global_norm/_128" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_435" + op: "AddV2" + input: "Mul_692" + input: "Mul_693" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_694/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_694/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_694" + op: "Mul" + input: "Mul_694/x" + input: "Mul_694/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_128" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_128" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_695/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_695" + op: "Mul" + input: "Mul_695/x" + input: "Square_128" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_436" + op: "AddV2" + input: "Mul_694" + input: "Mul_695" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_128" + op: "Sqrt" + input: "add_436" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_437/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_437" + op: "AddV2" + input: "Sqrt_128" + input: "add_437/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_129" + op: "RealDiv" + input: "add_435" + input: "add_437" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_696" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_129" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node 
{ + name: "ReadVariableOp_562" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_128" + op: "Sub" + input: "ReadVariableOp_562" + input: "mul_696" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_533" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + input: "sub_128" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_563" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + input: "^AssignVariableOp_533" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_534" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + input: "add_435" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_564" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_534" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_535" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + input: "add_436" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_565" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_535" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} 
+node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: 
"bert/encoder/layer_7/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_697/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_697/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_697" + op: "Mul" + input: "Mul_697/x" + input: "Mul_697/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_698/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_698" + op: "Mul" + input: "Mul_698/x" + input: 
"clip_by_global_norm/clip_by_global_norm/_129" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_438" + op: "AddV2" + input: "Mul_697" + input: "Mul_698" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_699/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_699/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_699" + op: "Mul" + input: "Mul_699/x" + input: "Mul_699/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_129" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_129" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_700/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_700" + op: "Mul" + input: "Mul_700/x" + input: "Square_129" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_439" + op: "AddV2" + input: "Mul_699" + input: "Mul_700" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_129" + op: "Sqrt" + input: "add_439" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_440/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_440" + op: "AddV2" + input: "Sqrt_129" + input: "add_440/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_130" + op: "RealDiv" + input: "add_438" + input: "add_440" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { 
+ size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_566" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_701/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_701" + op: "Mul" + input: "mul_701/x" + input: "ReadVariableOp_566" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_441" + op: "AddV2" + input: "truediv_130" + input: "mul_701" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_702" + op: "Mul" + input: "PolynomialDecay" + input: "add_441" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_567" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_129" + op: "Sub" + input: "ReadVariableOp_567" + input: "mul_702" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_536" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel" + input: "sub_129" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_568" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel" + input: "^AssignVariableOp_536" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_537" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m" + input: "add_438" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_569" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m" + input: "^AssignVariableOp_537" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_538" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v" + input: "add_439" 
+ attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_570" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v" + input: "^AssignVariableOp_538" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m" + input: "bert/encoder/layer_7/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + 
} + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v" + input: "bert/encoder/layer_7/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_703/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_703/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_703" + op: "Mul" + input: "Mul_703/x" + input: "Mul_703/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_704/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_704" + op: "Mul" + input: "Mul_704/x" + input: "clip_by_global_norm/clip_by_global_norm/_130" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_442" + op: "AddV2" + input: "Mul_703" + input: "Mul_704" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_705/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_705/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_705" + op: "Mul" + input: "Mul_705/x" + input: "Mul_705/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_130" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_130" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_706/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_706" + op: "Mul" + input: "Mul_706/x" + input: "Square_130" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_443" + op: "AddV2" + input: "Mul_705" + input: "Mul_706" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_130" + op: "Sqrt" + input: "add_443" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_444/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_444" + op: "AddV2" + input: "Sqrt_130" + input: "add_444/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_131" + op: "RealDiv" + input: "add_442" + input: "add_444" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_707" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_131" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_571" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_130" + op: "Sub" + input: "ReadVariableOp_571" + input: "mul_707" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_539" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias" + input: "sub_130" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_572" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_7/output/dense/bias" + input: "^AssignVariableOp_539" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_540" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m" + input: "add_442" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_573" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m" + input: "^AssignVariableOp_540" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_541" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v" + input: "add_443" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_574" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v" + input: "^AssignVariableOp_541" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: 
false + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_708/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_708/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_708" + op: "Mul" + input: "Mul_708/x" + input: "Mul_708/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_709/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_709" + op: "Mul" + input: "Mul_709/x" + input: "clip_by_global_norm/clip_by_global_norm/_131" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_445" + op: "AddV2" + input: "Mul_708" + input: "Mul_709" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_710/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_710/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_710" + op: "Mul" + input: "Mul_710/x" + input: "Mul_710/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_131" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_131" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_711/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_711" + op: "Mul" + input: "Mul_711/x" + input: "Square_131" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_446" + op: "AddV2" + input: "Mul_710" + input: "Mul_711" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_131" + op: "Sqrt" + input: "add_446" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_447/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_447" + op: "AddV2" + input: "Sqrt_131" + input: "add_447/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_132" + op: "RealDiv" + input: "add_445" + input: "add_447" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_712" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_132" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_575" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_131" + op: "Sub" + input: "ReadVariableOp_575" + input: "mul_712" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_542" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + input: "sub_131" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_576" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + input: "^AssignVariableOp_542" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_543" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + input: "add_445" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_577" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + input: "^AssignVariableOp_543" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_544" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + input: "add_446" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_578" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + input: "^AssignVariableOp_544" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + 
attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_713/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_713/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_713" + op: "Mul" + input: "Mul_713/x" + input: "Mul_713/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_714/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_714" + op: "Mul" + input: "Mul_714/x" + input: "clip_by_global_norm/clip_by_global_norm/_132" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_448" + op: "AddV2" + input: "Mul_713" + input: "Mul_714" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_715/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_715/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_715" + op: "Mul" + input: "Mul_715/x" + input: "Mul_715/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_132" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_132" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_716/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_716" + op: "Mul" + input: "Mul_716/x" + input: "Square_132" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_449" + op: "AddV2" + input: "Mul_715" + input: "Mul_716" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_132" + op: "Sqrt" + input: "add_449" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_450/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_450" + op: "AddV2" + input: "Sqrt_132" + input: "add_450/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_133" + op: "RealDiv" + input: "add_448" + input: "add_450" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_717" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_133" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_579" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_132" + op: "Sub" + input: "ReadVariableOp_579" + input: "mul_717" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_545" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + input: "sub_132" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_580" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + input: "^AssignVariableOp_545" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_546" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + input: "add_448" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_581" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + input: "^AssignVariableOp_546" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_547" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + input: "add_449" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_582" + op: "ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + input: "^AssignVariableOp_547" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + 
value { + s: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + 
s: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_718/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_718/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_718" + op: "Mul" + input: "Mul_718/x" + input: "Mul_718/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_719/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_719" + op: "Mul" + input: "Mul_719/x" + input: "clip_by_global_norm/clip_by_global_norm/_133" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_451" + op: "AddV2" + input: "Mul_718" + input: "Mul_719" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_720/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_720/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_720" + op: "Mul" + input: "Mul_720/x" + input: "Mul_720/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_133" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_133" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_721/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_721" + op: "Mul" + input: "Mul_721/x" + input: "Square_133" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_452" + op: "AddV2" + input: "Mul_720" + input: "Mul_721" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_133" + op: "Sqrt" + input: "add_452" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_453/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_453" + op: "AddV2" + input: "Sqrt_133" + input: "add_453/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_134" + op: "RealDiv" + input: "add_451" + input: "add_453" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_583" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_722/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_722" + op: "Mul" + input: "mul_722/x" + input: "ReadVariableOp_583" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_454" + op: "AddV2" + input: "truediv_134" + input: "mul_722" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_723" + op: "Mul" + input: "PolynomialDecay" + input: "add_454" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_584" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_133" + op: "Sub" + input: "ReadVariableOp_584" + input: "mul_723" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_548" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + input: "sub_133" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_585" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + input: "^AssignVariableOp_548" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_549" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + input: "add_451" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_586" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_549" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_550" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + input: "add_452" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_587" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_550" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { 
+ size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_724/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_724/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_724" + op: "Mul" + input: "Mul_724/x" + input: "Mul_724/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_725/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_725" + op: "Mul" + input: "Mul_725/x" + input: "clip_by_global_norm/clip_by_global_norm/_134" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_455" + op: "AddV2" + input: "Mul_724" + input: "Mul_725" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_726/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_726/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_726" + op: "Mul" + input: "Mul_726/x" + input: "Mul_726/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_134" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_134" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_727/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + 
dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_727" + op: "Mul" + input: "Mul_727/x" + input: "Square_134" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_456" + op: "AddV2" + input: "Mul_726" + input: "Mul_727" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_134" + op: "Sqrt" + input: "add_456" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_457/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_457" + op: "AddV2" + input: "Sqrt_134" + input: "add_457/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_135" + op: "RealDiv" + input: "add_455" + input: "add_457" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_728" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_135" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_588" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_134" + op: "Sub" + input: "ReadVariableOp_588" + input: "mul_728" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_551" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + input: "sub_134" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_589" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + input: "^AssignVariableOp_551" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_552" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + input: "add_455" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_590" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_552" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_553" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + input: "add_456" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_591" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_553" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"bert/encoder/layer_8/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"bert/encoder/layer_8/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_729/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_729/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_729" + op: "Mul" + input: "Mul_729/x" + input: "Mul_729/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_730/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_730" + op: "Mul" + input: "Mul_730/x" + input: "clip_by_global_norm/clip_by_global_norm/_135" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_458" + op: "AddV2" + input: "Mul_729" + input: "Mul_730" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_731/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_731/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_731" + op: "Mul" + input: "Mul_731/x" + input: "Mul_731/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_135" + op: "Square" + input: 
"clip_by_global_norm/clip_by_global_norm/_135" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_732/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_732" + op: "Mul" + input: "Mul_732/x" + input: "Square_135" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_459" + op: "AddV2" + input: "Mul_731" + input: "Mul_732" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_135" + op: "Sqrt" + input: "add_459" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_460/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_460" + op: "AddV2" + input: "Sqrt_135" + input: "add_460/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_136" + op: "RealDiv" + input: "add_458" + input: "add_460" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_592" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_733/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_733" + op: "Mul" + input: "mul_733/x" + input: "ReadVariableOp_592" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_461" + op: "AddV2" + input: "truediv_136" + input: "mul_733" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_734" + op: "Mul" + input: "PolynomialDecay" + input: "add_461" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_593" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_135" + op: "Sub" + input: "ReadVariableOp_593" + input: "mul_734" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_554" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + input: "sub_135" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_594" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + input: "^AssignVariableOp_554" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_555" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + input: "add_458" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_595" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_555" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_556" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + input: "add_459" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_596" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_556" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + 
} + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + 
attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_735/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_735/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_735" + op: "Mul" + input: "Mul_735/x" + input: "Mul_735/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_736/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_736" + op: "Mul" + input: "Mul_736/x" + input: "clip_by_global_norm/clip_by_global_norm/_136" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_462" + op: "AddV2" + input: "Mul_735" + input: "Mul_736" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_737/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_737/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_737" + op: "Mul" + input: "Mul_737/x" + input: "Mul_737/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_136" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_136" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_738/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_738" + op: "Mul" + input: "Mul_738/x" + input: "Square_136" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_463" + op: "AddV2" + input: "Mul_737" + input: "Mul_738" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_136" + op: "Sqrt" + input: "add_463" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_464/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_464" + op: "AddV2" + input: "Sqrt_136" + input: "add_464/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_137" + op: "RealDiv" + input: "add_462" + input: "add_464" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_739" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_597" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_136" + op: "Sub" + input: "ReadVariableOp_597" + input: "mul_739" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_557" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + input: "sub_136" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_598" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + input: "^AssignVariableOp_557" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_558" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + input: "add_462" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_599" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_558" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_559" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + input: "add_463" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} 
+node { + name: "ReadVariableOp_600" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_559" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_8/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_8/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_740/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_740/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_740" + op: "Mul" + input: "Mul_740/x" + input: "Mul_740/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_741/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_741" + op: "Mul" + input: "Mul_741/x" + input: "clip_by_global_norm/clip_by_global_norm/_137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_465" + op: "AddV2" + input: "Mul_740" + input: "Mul_741" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_742/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_742/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_742" + op: "Mul" + input: "Mul_742/x" + input: "Mul_742/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_137" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_743/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + 
dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_743" + op: "Mul" + input: "Mul_743/x" + input: "Square_137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_466" + op: "AddV2" + input: "Mul_742" + input: "Mul_743" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_137" + op: "Sqrt" + input: "add_466" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_467/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_467" + op: "AddV2" + input: "Sqrt_137" + input: "add_467/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_138" + op: "RealDiv" + input: "add_465" + input: "add_467" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_601" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_744/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_744" + op: "Mul" + input: "mul_744/x" + input: "ReadVariableOp_601" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_468" + op: "AddV2" + input: "truediv_138" + input: "mul_744" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_745" + op: "Mul" + input: "PolynomialDecay" + input: "add_468" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_602" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_137" + op: "Sub" + input: "ReadVariableOp_602" + input: "mul_745" + attr { + key: "T" + value { 
+ type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_560" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + input: "sub_137" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_603" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + input: "^AssignVariableOp_560" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_561" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + input: "add_465" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_604" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_561" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_562" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + input: "add_466" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_605" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_562" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_746/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + 
name: "Mul_746/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_746" + op: "Mul" + input: "Mul_746/x" + input: "Mul_746/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_747/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_747" + op: "Mul" + input: "Mul_747/x" + input: "clip_by_global_norm/clip_by_global_norm/_138" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_469" + op: "AddV2" + input: "Mul_746" + input: "Mul_747" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_748/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_748/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_748" + op: "Mul" + input: "Mul_748/x" + input: "Mul_748/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_138" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_138" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_749/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_749" + op: "Mul" + input: "Mul_749/x" + input: "Square_138" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_470" + op: "AddV2" + input: "Mul_748" + input: "Mul_749" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_138" + op: "Sqrt" + input: "add_470" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_471/y" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_471" + op: "AddV2" + input: "Sqrt_138" + input: "add_471/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_139" + op: "RealDiv" + input: "add_469" + input: "add_471" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_750" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_139" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_606" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_138" + op: "Sub" + input: "ReadVariableOp_606" + input: "mul_750" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_563" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + input: "sub_138" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_607" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + input: "^AssignVariableOp_563" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_564" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + input: "add_469" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_608" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_564" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_565" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + input: "add_470" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_609" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_565" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + 
} + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_751/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_751/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_751" + op: "Mul" + input: "Mul_751/x" + input: "Mul_751/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_752/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_752" + op: "Mul" + input: "Mul_752/x" + input: "clip_by_global_norm/clip_by_global_norm/_139" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_472" + op: "AddV2" + input: "Mul_751" + input: "Mul_752" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_753/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_753/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_753" + op: "Mul" + input: "Mul_753/x" + input: "Mul_753/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_139" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_139" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_754/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_754" + op: "Mul" + input: "Mul_754/x" + input: "Square_139" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "add_473" + op: "AddV2" + input: "Mul_753" + input: "Mul_754" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_139" + op: "Sqrt" + input: "add_473" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_474/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_474" + op: "AddV2" + input: "Sqrt_139" + input: "add_474/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_140" + op: "RealDiv" + input: "add_472" + input: "add_474" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_610" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_755/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_755" + op: "Mul" + input: "mul_755/x" + input: "ReadVariableOp_610" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_475" + op: "AddV2" + input: "truediv_140" + input: "mul_755" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_756" + op: "Mul" + input: "PolynomialDecay" + input: "add_475" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_611" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_139" + op: "Sub" + input: "ReadVariableOp_611" + input: "mul_756" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_566" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + input: "sub_139" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_612" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + input: "^AssignVariableOp_566" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_567" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + input: "add_472" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_613" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_567" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_568" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + input: "add_473" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_614" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_568" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_8/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_757/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_757/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_757" + op: "Mul" + input: "Mul_757/x" + input: "Mul_757/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_758/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_758" + op: "Mul" + input: "Mul_758/x" + input: "clip_by_global_norm/clip_by_global_norm/_140" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_476" + op: "AddV2" + input: "Mul_757" + input: "Mul_758" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_759/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_759/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_759" + op: "Mul" + input: "Mul_759/x" + input: "Mul_759/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_140" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_140" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_760/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_760" + op: "Mul" + input: "Mul_760/x" + input: "Square_140" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_477" + op: "AddV2" + input: "Mul_759" + input: "Mul_760" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_140" + op: "Sqrt" + input: "add_477" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_478/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} 
+node { + name: "add_478" + op: "AddV2" + input: "Sqrt_140" + input: "add_478/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_141" + op: "RealDiv" + input: "add_476" + input: "add_478" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_761" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_141" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_615" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_140" + op: "Sub" + input: "ReadVariableOp_615" + input: "mul_761" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_569" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + input: "sub_140" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_616" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + input: "^AssignVariableOp_569" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_570" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + input: "add_476" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_617" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_570" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_571" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + input: "add_477" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_618" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_571" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + } + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + } + } +} +node { + name: 
"bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_762/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_762/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_762" + op: "Mul" + input: "Mul_762/x" + input: "Mul_762/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_763/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_763" + op: "Mul" + input: "Mul_763/x" + input: "clip_by_global_norm/clip_by_global_norm/_141" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_479" + op: "AddV2" + input: "Mul_762" + input: "Mul_763" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_764/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_764/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_764" + 
op: "Mul" + input: "Mul_764/x" + input: "Mul_764/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_141" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_141" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_765/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_765" + op: "Mul" + input: "Mul_765/x" + input: "Square_141" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_480" + op: "AddV2" + input: "Mul_764" + input: "Mul_765" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_141" + op: "Sqrt" + input: "add_480" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_481/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_481" + op: "AddV2" + input: "Sqrt_141" + input: "add_481/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_142" + op: "RealDiv" + input: "add_479" + input: "add_481" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_766" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_142" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_619" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_141" + op: "Sub" + input: "ReadVariableOp_619" + input: "mul_766" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_572" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + input: "sub_141" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_620" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + input: "^AssignVariableOp_572" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_573" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + input: "add_479" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_621" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + input: "^AssignVariableOp_573" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_574" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + input: "add_480" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_622" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + input: "^AssignVariableOp_574" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + input: 
"bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_767/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_767/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_767" + op: "Mul" + input: "Mul_767/x" + input: "Mul_767/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_768/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_768" + op: "Mul" + input: "Mul_768/x" + input: "clip_by_global_norm/clip_by_global_norm/_142" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_482" + op: "AddV2" + input: "Mul_767" + input: "Mul_768" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_769/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_769/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_769" + op: "Mul" + input: "Mul_769/x" + input: "Mul_769/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_142" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_142" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_770/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_770" + op: "Mul" + input: "Mul_770/x" + input: "Square_142" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_483" + op: "AddV2" + input: "Mul_769" + input: "Mul_770" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_142" + op: "Sqrt" + input: "add_483" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_484/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list 
{ + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_484" + op: "AddV2" + input: "Sqrt_142" + input: "add_484/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_143" + op: "RealDiv" + input: "add_482" + input: "add_484" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_771" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_143" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_623" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_142" + op: "Sub" + input: "ReadVariableOp_623" + input: "mul_771" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_575" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + input: "sub_142" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_624" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + input: "^AssignVariableOp_575" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_576" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + input: "add_482" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_625" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + input: "^AssignVariableOp_576" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_577" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + input: "add_483" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_626" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + input: "^AssignVariableOp_577" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + 
} + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"Mul_772/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_772/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_772" + op: "Mul" + input: "Mul_772/x" + input: "Mul_772/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_773/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_773" + op: "Mul" + input: "Mul_773/x" + input: "clip_by_global_norm/clip_by_global_norm/_143" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_485" + op: "AddV2" + input: "Mul_772" + input: "Mul_773" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_774/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_774/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_774" + op: "Mul" + input: "Mul_774/x" + input: "Mul_774/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_143" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_143" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_775/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_775" + op: "Mul" + input: "Mul_775/x" + input: "Square_143" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: 
"add_486" + op: "AddV2" + input: "Mul_774" + input: "Mul_775" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_143" + op: "Sqrt" + input: "add_486" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_487/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_487" + op: "AddV2" + input: "Sqrt_143" + input: "add_487/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_144" + op: "RealDiv" + input: "add_485" + input: "add_487" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_627" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_776/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_776" + op: "Mul" + input: "mul_776/x" + input: "ReadVariableOp_627" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_488" + op: "AddV2" + input: "truediv_144" + input: "mul_776" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_777" + op: "Mul" + input: "PolynomialDecay" + input: "add_488" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_628" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_143" + op: "Sub" + input: "ReadVariableOp_628" + input: "mul_777" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_578" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + input: "sub_143" + attr { + key: "_has_manual_control_dependencies" + value { + b: 
true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_629" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + input: "^AssignVariableOp_578" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_579" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + input: "add_485" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_630" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_579" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_580" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + input: "add_486" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_631" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_580" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: 
"bert/encoder/layer_8/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/intermediate/dense/bias/adam_v" + } + 
} + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_778/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_778/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_778" + op: "Mul" + input: "Mul_778/x" + input: "Mul_778/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_779/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_779" + op: "Mul" + input: "Mul_779/x" + input: "clip_by_global_norm/clip_by_global_norm/_144" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_489" + op: "AddV2" + input: "Mul_778" + input: "Mul_779" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_780/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_780/ReadVariableOp" + 
op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_780" + op: "Mul" + input: "Mul_780/x" + input: "Mul_780/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_144" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_144" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_781/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_781" + op: "Mul" + input: "Mul_781/x" + input: "Square_144" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_490" + op: "AddV2" + input: "Mul_780" + input: "Mul_781" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_144" + op: "Sqrt" + input: "add_490" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_491/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_491" + op: "AddV2" + input: "Sqrt_144" + input: "add_491/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_145" + op: "RealDiv" + input: "add_489" + input: "add_491" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_782" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_145" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_632" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_144" + op: "Sub" + input: "ReadVariableOp_632" + input: "mul_782" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_581" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + input: "sub_144" + attr { + key: "_has_manual_control_dependencies" + value 
{ + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_633" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + input: "^AssignVariableOp_581" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_582" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + input: "add_489" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_634" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_582" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_583" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + input: "add_490" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_635" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_583" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_m" + op: "VarHandleOp" 
+ attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_783/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_783/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_783" + op: "Mul" + input: "Mul_783/x" + input: "Mul_783/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_784/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_784" + op: "Mul" + input: "Mul_784/x" + input: "clip_by_global_norm/clip_by_global_norm/_145" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_492" + op: "AddV2" + input: "Mul_783" + input: "Mul_784" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_785/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 
0.9990000128746033 + } + } + } +} +node { + name: "Mul_785/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_785" + op: "Mul" + input: "Mul_785/x" + input: "Mul_785/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_145" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_145" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_786/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_786" + op: "Mul" + input: "Mul_786/x" + input: "Square_145" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_493" + op: "AddV2" + input: "Mul_785" + input: "Mul_786" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_145" + op: "Sqrt" + input: "add_493" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_494/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_494" + op: "AddV2" + input: "Sqrt_145" + input: "add_494/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_146" + op: "RealDiv" + input: "add_492" + input: "add_494" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_636" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_787/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_787" + op: "Mul" + input: "mul_787/x" + input: "ReadVariableOp_636" + attr { 
+ key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_495" + op: "AddV2" + input: "truediv_146" + input: "mul_787" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_788" + op: "Mul" + input: "PolynomialDecay" + input: "add_495" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_637" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_145" + op: "Sub" + input: "ReadVariableOp_637" + input: "mul_788" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_584" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel" + input: "sub_145" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_638" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel" + input: "^AssignVariableOp_584" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_585" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m" + input: "add_492" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_639" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m" + input: "^AssignVariableOp_585" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_586" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v" + input: "add_493" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_640" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v" + input: "^AssignVariableOp_586" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/bias/adam_m" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m" + input: "bert/encoder/layer_8/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v" + input: 
"bert/encoder/layer_8/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_789/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_789/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_789" + op: "Mul" + input: "Mul_789/x" + input: "Mul_789/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_790/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_790" + op: "Mul" + input: "Mul_790/x" + input: "clip_by_global_norm/clip_by_global_norm/_146" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_496" + op: "AddV2" + input: "Mul_789" + input: "Mul_790" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_791/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_791/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_791" + op: "Mul" + input: "Mul_791/x" + input: "Mul_791/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_146" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_146" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_792/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + 
} + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_792" + op: "Mul" + input: "Mul_792/x" + input: "Square_146" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_497" + op: "AddV2" + input: "Mul_791" + input: "Mul_792" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_146" + op: "Sqrt" + input: "add_497" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_498/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_498" + op: "AddV2" + input: "Sqrt_146" + input: "add_498/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_147" + op: "RealDiv" + input: "add_496" + input: "add_498" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_793" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_147" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_641" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_146" + op: "Sub" + input: "ReadVariableOp_641" + input: "mul_793" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_587" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias" + input: "sub_146" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_642" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias" + input: "^AssignVariableOp_587" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_588" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m" + input: "add_496" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_643" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m" + input: "^AssignVariableOp_588" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_589" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v" + input: "add_497" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_644" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v" + input: "^AssignVariableOp_589" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_794/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_794/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_794" + op: "Mul" + input: "Mul_794/x" + input: "Mul_794/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_795/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_795" + op: "Mul" + input: "Mul_795/x" + input: "clip_by_global_norm/clip_by_global_norm/_147" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_499" + op: "AddV2" + input: "Mul_794" + input: "Mul_795" + 
attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_796/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_796/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_796" + op: "Mul" + input: "Mul_796/x" + input: "Mul_796/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_147" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_147" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_797/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_797" + op: "Mul" + input: "Mul_797/x" + input: "Square_147" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_500" + op: "AddV2" + input: "Mul_796" + input: "Mul_797" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_147" + op: "Sqrt" + input: "add_500" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_501/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_501" + op: "AddV2" + input: "Sqrt_147" + input: "add_501/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_148" + op: "RealDiv" + input: "add_499" + input: "add_501" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_798" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_148" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_645" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } 
+ } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_147" + op: "Sub" + input: "ReadVariableOp_645" + input: "mul_798" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_590" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + input: "sub_147" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_646" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + input: "^AssignVariableOp_590" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_591" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + input: "add_499" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_647" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + input: "^AssignVariableOp_591" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_592" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + input: "add_500" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_648" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + input: "^AssignVariableOp_592" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: 
"bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_799/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_799/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_799" + op: "Mul" + input: "Mul_799/x" + input: "Mul_799/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_800/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_800" + op: "Mul" + input: "Mul_800/x" + input: "clip_by_global_norm/clip_by_global_norm/_148" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_502" + op: "AddV2" + input: "Mul_799" + input: "Mul_800" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_801/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_801/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_801" + op: "Mul" + input: "Mul_801/x" + input: "Mul_801/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_148" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_148" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_802/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_802" + op: "Mul" + input: "Mul_802/x" + input: "Square_148" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_503" + op: "AddV2" + input: "Mul_801" + input: "Mul_802" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_148" + op: "Sqrt" + input: "add_503" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_504/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_504" + op: "AddV2" + input: "Sqrt_148" + input: "add_504/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_149" + op: "RealDiv" + input: "add_502" + input: "add_504" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_803" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_149" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_649" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_148" + op: "Sub" + input: "ReadVariableOp_649" + input: "mul_803" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_593" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + input: "sub_148" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_650" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + input: "^AssignVariableOp_593" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_594" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + input: "add_502" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_651" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + input: "^AssignVariableOp_594" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_595" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + input: "add_503" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + 
attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_652" + op: "ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + input: "^AssignVariableOp_595" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false 
+ } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} 
+node { + name: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_804/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_804/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_804" + op: "Mul" + input: "Mul_804/x" + input: "Mul_804/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_805/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_805" + op: "Mul" + input: "Mul_805/x" + input: "clip_by_global_norm/clip_by_global_norm/_149" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_505" + op: "AddV2" + input: "Mul_804" + input: "Mul_805" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_806/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_806/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_806" + op: "Mul" + input: "Mul_806/x" + input: "Mul_806/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_149" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_149" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_807/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_807" + op: "Mul" + input: "Mul_807/x" + input: "Square_149" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_506" + op: "AddV2" + input: "Mul_806" + input: "Mul_807" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_149" + op: "Sqrt" + input: "add_506" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_507/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_507" + op: "AddV2" + input: "Sqrt_149" + input: "add_507/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_150" + op: "RealDiv" + input: "add_505" + input: "add_507" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_653" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_808/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_808" + op: "Mul" + input: "mul_808/x" + input: "ReadVariableOp_653" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_508" + op: "AddV2" + input: "truediv_150" + input: "mul_808" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_809" + op: "Mul" + input: "PolynomialDecay" + input: "add_508" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_654" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_149" + op: "Sub" + input: "ReadVariableOp_654" + input: "mul_809" + attr { + key: 
"T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_596" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + input: "sub_149" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_655" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + input: "^AssignVariableOp_596" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_597" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + input: "add_505" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_656" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_597" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_598" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + input: "add_506" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_657" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_598" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_810/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + 
name: "Mul_810/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_810" + op: "Mul" + input: "Mul_810/x" + input: "Mul_810/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_811/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_811" + op: "Mul" + input: "Mul_811/x" + input: "clip_by_global_norm/clip_by_global_norm/_150" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_509" + op: "AddV2" + input: "Mul_810" + input: "Mul_811" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_812/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_812/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_812" + op: "Mul" + input: "Mul_812/x" + input: "Mul_812/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_150" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_150" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_813/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_813" + op: "Mul" + input: "Mul_813/x" + input: "Square_150" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_510" + op: "AddV2" + input: "Mul_812" + input: "Mul_813" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_150" + op: "Sqrt" + input: "add_510" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_511/y" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_511" + op: "AddV2" + input: "Sqrt_150" + input: "add_511/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_151" + op: "RealDiv" + input: "add_509" + input: "add_511" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_814" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_151" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_658" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_150" + op: "Sub" + input: "ReadVariableOp_658" + input: "mul_814" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_599" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + input: "sub_150" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_659" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + input: "^AssignVariableOp_599" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_600" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + input: "add_509" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_660" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_600" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_601" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + input: "add_510" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_661" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_601" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_9/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"bert/encoder/layer_9/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_815/x" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_815/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_815" + op: "Mul" + input: "Mul_815/x" + input: "Mul_815/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_816/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_816" + op: "Mul" + input: "Mul_816/x" + input: "clip_by_global_norm/clip_by_global_norm/_151" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_512" + op: "AddV2" + input: "Mul_815" + input: "Mul_816" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_817/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_817/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_817" + op: "Mul" + input: "Mul_817/x" + input: "Mul_817/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_151" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_151" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_818/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_818" + op: "Mul" + input: "Mul_818/x" + input: "Square_151" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_513" + op: "AddV2" + input: "Mul_817" + input: 
"Mul_818" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_151" + op: "Sqrt" + input: "add_513" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_514/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_514" + op: "AddV2" + input: "Sqrt_151" + input: "add_514/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_152" + op: "RealDiv" + input: "add_512" + input: "add_514" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_662" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_819/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_819" + op: "Mul" + input: "mul_819/x" + input: "ReadVariableOp_662" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_515" + op: "AddV2" + input: "truediv_152" + input: "mul_819" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_820" + op: "Mul" + input: "PolynomialDecay" + input: "add_515" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_663" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_151" + op: "Sub" + input: "ReadVariableOp_663" + input: "mul_820" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_602" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + input: "sub_151" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_664" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + input: "^AssignVariableOp_602" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_603" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + input: "add_512" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_665" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_603" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_604" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + input: "add_513" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_666" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_604" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_821/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_821/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_821" + op: "Mul" + input: "Mul_821/x" + input: "Mul_821/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_822/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_822" + op: "Mul" + input: "Mul_822/x" + input: "clip_by_global_norm/clip_by_global_norm/_152" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_516" + op: "AddV2" + input: "Mul_821" + input: "Mul_822" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_823/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_823/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_823" + op: "Mul" + input: "Mul_823/x" + input: "Mul_823/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_152" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_152" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_824/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_824" + op: "Mul" + input: "Mul_824/x" + input: "Square_152" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_517" + op: "AddV2" + input: "Mul_823" + input: "Mul_824" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_152" + op: "Sqrt" + input: "add_517" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_518/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_518" + op: "AddV2" + input: "Sqrt_152" + input: "add_518/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } 
+ } + } + } + } +} +node { + name: "truediv_153" + op: "RealDiv" + input: "add_516" + input: "add_518" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_825" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_153" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_667" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_152" + op: "Sub" + input: "ReadVariableOp_667" + input: "mul_825" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_605" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + input: "sub_152" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_668" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + input: "^AssignVariableOp_605" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_606" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + input: "add_516" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_669" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_606" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_607" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + input: "add_517" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_670" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_607" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + 
tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + 
tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_826/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_826/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 
+ } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_826" + op: "Mul" + input: "Mul_826/x" + input: "Mul_826/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_827/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_827" + op: "Mul" + input: "Mul_827/x" + input: "clip_by_global_norm/clip_by_global_norm/_153" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_519" + op: "AddV2" + input: "Mul_826" + input: "Mul_827" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_828/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_828/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_828" + op: "Mul" + input: "Mul_828/x" + input: "Mul_828/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_153" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_153" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_829/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_829" + op: "Mul" + input: "Mul_829/x" + input: "Square_153" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_520" + op: "AddV2" + input: "Mul_828" + input: "Mul_829" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_153" + op: "Sqrt" + input: "add_520" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"add_521/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_521" + op: "AddV2" + input: "Sqrt_153" + input: "add_521/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_154" + op: "RealDiv" + input: "add_519" + input: "add_521" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_671" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_830/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_830" + op: "Mul" + input: "mul_830/x" + input: "ReadVariableOp_671" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_522" + op: "AddV2" + input: "truediv_154" + input: "mul_830" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_831" + op: "Mul" + input: "PolynomialDecay" + input: "add_522" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_672" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_153" + op: "Sub" + input: "ReadVariableOp_672" + input: "mul_831" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_608" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + input: "sub_153" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_673" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + input: "^AssignVariableOp_608" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: 
"AssignVariableOp_609" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + input: "add_519" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_674" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_609" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_610" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + input: "add_520" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_675" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_610" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim 
{ + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_832/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_832/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_832" + op: "Mul" + input: "Mul_832/x" + input: "Mul_832/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_833/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_833" 
+ op: "Mul" + input: "Mul_833/x" + input: "clip_by_global_norm/clip_by_global_norm/_154" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_523" + op: "AddV2" + input: "Mul_832" + input: "Mul_833" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_834/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_834/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_834" + op: "Mul" + input: "Mul_834/x" + input: "Mul_834/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_154" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_154" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_835/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_835" + op: "Mul" + input: "Mul_835/x" + input: "Square_154" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_524" + op: "AddV2" + input: "Mul_834" + input: "Mul_835" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_154" + op: "Sqrt" + input: "add_524" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_525/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_525" + op: "AddV2" + input: "Sqrt_154" + input: "add_525/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_155" + op: "RealDiv" + input: "add_523" + input: "add_525" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_836" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_155" + attr { + key: "T" + value { + type: DT_FLOAT + } + } 
+ attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_676" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_154" + op: "Sub" + input: "ReadVariableOp_676" + input: "mul_836" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_611" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + input: "sub_154" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_677" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + input: "^AssignVariableOp_611" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_612" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + input: "add_523" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_678" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_612" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_613" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + input: "add_524" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_679" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_613" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + 
} + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_837/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_837/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_837" + op: "Mul" + input: "Mul_837/x" + input: "Mul_837/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } 
+ dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_838/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_838" + op: "Mul" + input: "Mul_838/x" + input: "clip_by_global_norm/clip_by_global_norm/_155" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_526" + op: "AddV2" + input: "Mul_837" + input: "Mul_838" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_839/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_839/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_839" + op: "Mul" + input: "Mul_839/x" + input: "Mul_839/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_155" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_155" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_840/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_840" + op: "Mul" + input: "Mul_840/x" + input: "Square_155" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_527" + op: "AddV2" + input: "Mul_839" + input: "Mul_840" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_155" + op: "Sqrt" + input: "add_527" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_528/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_528" + op: "AddV2" + input: 
"Sqrt_155" + input: "add_528/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_156" + op: "RealDiv" + input: "add_526" + input: "add_528" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_680" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_841/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_841" + op: "Mul" + input: "mul_841/x" + input: "ReadVariableOp_680" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_529" + op: "AddV2" + input: "truediv_156" + input: "mul_841" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_842" + op: "Mul" + input: "PolynomialDecay" + input: "add_529" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_681" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_155" + op: "Sub" + input: "ReadVariableOp_681" + input: "mul_842" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_614" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + input: "sub_155" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_682" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + input: "^AssignVariableOp_614" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_615" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + input: "add_526" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: 
"ReadVariableOp_683" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_615" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_616" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + input: "add_527" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_684" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_616" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/bias/adam_v" + } + } 
+ } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_843/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_843/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_843" + op: "Mul" + input: "Mul_843/x" + input: "Mul_843/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_844/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_844" + op: "Mul" + input: "Mul_844/x" + input: "clip_by_global_norm/clip_by_global_norm/_156" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_530" + op: "AddV2" + 
input: "Mul_843" + input: "Mul_844" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_845/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_845/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_845" + op: "Mul" + input: "Mul_845/x" + input: "Mul_845/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_156" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_156" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_846/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_846" + op: "Mul" + input: "Mul_846/x" + input: "Square_156" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_531" + op: "AddV2" + input: "Mul_845" + input: "Mul_846" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_156" + op: "Sqrt" + input: "add_531" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_532/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_532" + op: "AddV2" + input: "Sqrt_156" + input: "add_532/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_157" + op: "RealDiv" + input: "add_530" + input: "add_532" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_847" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_157" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_685" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_156" + op: "Sub" + input: "ReadVariableOp_685" + input: "mul_847" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_617" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + input: "sub_156" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_686" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + input: "^AssignVariableOp_617" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_618" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + input: "add_530" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_687" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_618" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_619" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + input: "add_531" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_688" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_619" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + 
value { + s: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v/Read/ReadVariableOp" + 
op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_848/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_848/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_848" + op: "Mul" + input: "Mul_848/x" + input: "Mul_848/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_849/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_849" + op: "Mul" + input: "Mul_849/x" + input: "clip_by_global_norm/clip_by_global_norm/_157" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_533" + op: "AddV2" + input: "Mul_848" + input: "Mul_849" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_850/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_850/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_850" + op: "Mul" + input: "Mul_850/x" + input: "Mul_850/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_157" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_157" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_851/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_851" + op: "Mul" + input: "Mul_851/x" + input: "Square_157" + attr { + key: "T" + value { + type: DT_FLOAT + } + } 
+ attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_534" + op: "AddV2" + input: "Mul_850" + input: "Mul_851" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_157" + op: "Sqrt" + input: "add_534" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_535/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_535" + op: "AddV2" + input: "Sqrt_157" + input: "add_535/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_158" + op: "RealDiv" + input: "add_533" + input: "add_535" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_852" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_158" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_689" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_157" + op: "Sub" + input: "ReadVariableOp_689" + input: "mul_852" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_620" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + input: "sub_157" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_690" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + input: "^AssignVariableOp_620" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_621" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + input: "add_533" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_691" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + input: "^AssignVariableOp_621" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_622" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + input: "add_534" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_692" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + input: "^AssignVariableOp_622" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_853/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_853/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_853" + op: "Mul" + input: "Mul_853/x" + input: "Mul_853/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_854/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_854" + op: "Mul" + input: "Mul_854/x" + input: "clip_by_global_norm/clip_by_global_norm/_158" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" 
+ value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_536" + op: "AddV2" + input: "Mul_853" + input: "Mul_854" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_855/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_855/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_855" + op: "Mul" + input: "Mul_855/x" + input: "Mul_855/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_158" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_158" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_856/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_856" + op: "Mul" + input: "Mul_856/x" + input: "Square_158" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_537" + op: "AddV2" + input: "Mul_855" + input: "Mul_856" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_158" + op: "Sqrt" + input: "add_537" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_538/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_538" + op: "AddV2" + input: "Sqrt_158" + input: "add_538/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_159" + op: "RealDiv" + input: "add_536" + input: "add_538" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_857" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_159" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_693" + op: 
"ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_158" + op: "Sub" + input: "ReadVariableOp_693" + input: "mul_857" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_623" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + input: "sub_158" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_694" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + input: "^AssignVariableOp_623" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_624" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + input: "add_536" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_695" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + input: "^AssignVariableOp_624" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_625" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + input: "add_537" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_696" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + input: "^AssignVariableOp_625" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + 
value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_858/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_858/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_858" + op: "Mul" + input: "Mul_858/x" + input: "Mul_858/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_859/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } 
+ } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_859" + op: "Mul" + input: "Mul_859/x" + input: "clip_by_global_norm/clip_by_global_norm/_159" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_539" + op: "AddV2" + input: "Mul_858" + input: "Mul_859" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_860/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_860/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_860" + op: "Mul" + input: "Mul_860/x" + input: "Mul_860/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_159" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_159" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_861/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_861" + op: "Mul" + input: "Mul_861/x" + input: "Square_159" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_540" + op: "AddV2" + input: "Mul_860" + input: "Mul_861" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_159" + op: "Sqrt" + input: "add_540" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_541/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_541" + op: "AddV2" + input: "Sqrt_159" + input: "add_541/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { 
+ dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_160" + op: "RealDiv" + input: "add_539" + input: "add_541" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_697" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_862/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_862" + op: "Mul" + input: "mul_862/x" + input: "ReadVariableOp_697" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_542" + op: "AddV2" + input: "truediv_160" + input: "mul_862" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_863" + op: "Mul" + input: "PolynomialDecay" + input: "add_542" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_698" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_159" + op: "Sub" + input: "ReadVariableOp_698" + input: "mul_863" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_626" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + input: "sub_159" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_699" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + input: "^AssignVariableOp_626" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_627" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + input: "add_539" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_700" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_627" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_628" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + input: "add_540" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_701" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_628" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + 
input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_864/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_864/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_864" + op: "Mul" + input: "Mul_864/x" + input: "Mul_864/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_865/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_865" + op: "Mul" + input: "Mul_865/x" + input: "clip_by_global_norm/clip_by_global_norm/_160" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_543" + op: "AddV2" + input: "Mul_864" + input: "Mul_865" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_866/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_866/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_866" + op: "Mul" + input: "Mul_866/x" + input: "Mul_866/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_160" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_160" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_867/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + 
dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_867" + op: "Mul" + input: "Mul_867/x" + input: "Square_160" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_544" + op: "AddV2" + input: "Mul_866" + input: "Mul_867" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_160" + op: "Sqrt" + input: "add_544" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_545/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_545" + op: "AddV2" + input: "Sqrt_160" + input: "add_545/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_161" + op: "RealDiv" + input: "add_543" + input: "add_545" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_868" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_161" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_702" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_160" + op: "Sub" + input: "ReadVariableOp_702" + input: "mul_868" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_629" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + input: "sub_160" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_703" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + input: "^AssignVariableOp_629" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_630" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + input: "add_543" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_704" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_630" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_631" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + input: "add_544" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_705" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_631" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_9/output/dense/kernel/adam_m" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_869/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_869/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_869" + op: "Mul" + input: "Mul_869/x" + input: "Mul_869/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_870/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_870" + op: "Mul" + input: "Mul_870/x" + input: "clip_by_global_norm/clip_by_global_norm/_161" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_546" + op: "AddV2" + input: "Mul_869" + input: "Mul_870" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_871/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_871/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_871" + op: "Mul" + input: "Mul_871/x" + input: "Mul_871/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_161" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_161" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_872/x" + op: "Const" + attr { 
+ key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_872" + op: "Mul" + input: "Mul_872/x" + input: "Square_161" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_547" + op: "AddV2" + input: "Mul_871" + input: "Mul_872" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_161" + op: "Sqrt" + input: "add_547" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_548/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_548" + op: "AddV2" + input: "Sqrt_161" + input: "add_548/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_162" + op: "RealDiv" + input: "add_546" + input: "add_548" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_706" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_873/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_873" + op: "Mul" + input: "mul_873/x" + input: "ReadVariableOp_706" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_549" + op: "AddV2" + input: "truediv_162" + input: "mul_873" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_874" + op: "Mul" + input: "PolynomialDecay" + input: "add_549" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_707" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { 
+ key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_161" + op: "Sub" + input: "ReadVariableOp_707" + input: "mul_874" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_632" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel" + input: "sub_161" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_708" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel" + input: "^AssignVariableOp_632" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_633" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m" + input: "add_546" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_709" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m" + input: "^AssignVariableOp_633" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_634" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v" + input: "add_547" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_710" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v" + input: "^AssignVariableOp_634" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/dense/bias/adam_m" + } + } +} +node { + name: 
"bert/encoder/layer_9/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_m" + input: "bert/encoder/layer_9/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v" + input: "bert/encoder/layer_9/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_875/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_875/ReadVariableOp" + op: "ReadVariableOp" + 
input: "bert/encoder/layer_9/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_875" + op: "Mul" + input: "Mul_875/x" + input: "Mul_875/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_876/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_876" + op: "Mul" + input: "Mul_876/x" + input: "clip_by_global_norm/clip_by_global_norm/_162" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_550" + op: "AddV2" + input: "Mul_875" + input: "Mul_876" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_877/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_877/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_877" + op: "Mul" + input: "Mul_877/x" + input: "Mul_877/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_162" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_162" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_878/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_878" + op: "Mul" + input: "Mul_878/x" + input: "Square_162" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_551" + op: "AddV2" + input: "Mul_877" + input: "Mul_878" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_162" + op: "Sqrt" + input: "add_551" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_552/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_552" + op: "AddV2" + input: "Sqrt_162" + input: "add_552/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_163" + op: "RealDiv" + input: "add_550" + input: "add_552" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_879" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_163" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_711" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_162" + op: "Sub" + input: "ReadVariableOp_711" + input: "mul_879" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_635" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias" + input: "sub_162" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_712" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias" + input: "^AssignVariableOp_635" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_636" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_m" + input: "add_550" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_713" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_m" + input: "^AssignVariableOp_636" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_637" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v" + input: "add_551" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_714" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v" + input: "^AssignVariableOp_637" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + } + } +} +node { + name: 
"bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_880/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_880/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_880" + op: "Mul" + input: "Mul_880/x" + input: "Mul_880/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_881/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_881" + op: "Mul" + input: "Mul_881/x" + input: "clip_by_global_norm/clip_by_global_norm/_163" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_553" + op: "AddV2" + input: "Mul_880" + input: "Mul_881" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_882/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_882/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_882" + op: "Mul" + input: "Mul_882/x" + input: "Mul_882/ReadVariableOp" + attr { + key: "T" + value 
{ + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_163" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_163" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_883/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_883" + op: "Mul" + input: "Mul_883/x" + input: "Square_163" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_554" + op: "AddV2" + input: "Mul_882" + input: "Mul_883" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_163" + op: "Sqrt" + input: "add_554" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_555/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_555" + op: "AddV2" + input: "Sqrt_163" + input: "add_555/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_164" + op: "RealDiv" + input: "add_553" + input: "add_555" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_884" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_164" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_715" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_163" + op: "Sub" + input: "ReadVariableOp_715" + input: "mul_884" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_638" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + input: "sub_163" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_716" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + input: "^AssignVariableOp_638" + attr { + key: "_output_shapes" + value { + list { 
+ shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_639" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + input: "add_553" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_717" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + input: "^AssignVariableOp_639" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_640" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + input: "add_554" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_718" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + input: "^AssignVariableOp_640" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_885/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_885/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_885" + op: "Mul" + input: "Mul_885/x" + input: "Mul_885/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "Mul_886/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_886" + op: "Mul" + input: "Mul_886/x" + input: "clip_by_global_norm/clip_by_global_norm/_164" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_556" + op: "AddV2" + input: "Mul_885" + input: "Mul_886" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_887/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_887/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_887" + op: "Mul" + input: "Mul_887/x" + input: "Mul_887/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_164" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_164" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_888/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_888" + op: "Mul" + input: "Mul_888/x" + input: "Square_164" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_557" + op: "AddV2" + input: "Mul_887" + input: "Mul_888" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_164" + op: "Sqrt" + input: "add_557" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_558/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_558" + op: "AddV2" + input: "Sqrt_164" + input: "add_558/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { 
+ name: "truediv_165" + op: "RealDiv" + input: "add_556" + input: "add_558" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_889" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_165" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_719" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_164" + op: "Sub" + input: "ReadVariableOp_719" + input: "mul_889" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_641" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + input: "sub_164" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_720" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + input: "^AssignVariableOp_641" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_642" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + input: "add_556" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_721" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + input: "^AssignVariableOp_642" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_643" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + input: "add_557" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_722" + op: "ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + input: "^AssignVariableOp_643" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + 
tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: 
DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_890/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_890/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_890" + op: "Mul" + input: "Mul_890/x" + input: "Mul_890/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_891/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_891" + op: "Mul" + input: "Mul_891/x" + input: "clip_by_global_norm/clip_by_global_norm/_165" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_559" + op: "AddV2" + input: "Mul_890" + input: "Mul_891" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_892/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_892/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_892" + op: "Mul" + input: "Mul_892/x" + input: "Mul_892/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_165" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_165" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_893/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_893" + op: "Mul" + input: "Mul_893/x" + input: "Square_165" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_560" + op: "AddV2" + input: "Mul_892" + input: "Mul_893" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_165" + op: "Sqrt" + input: "add_560" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + 
} + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_561/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_561" + op: "AddV2" + input: "Sqrt_165" + input: "add_561/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_166" + op: "RealDiv" + input: "add_559" + input: "add_561" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_723" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_894/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_894" + op: "Mul" + input: "mul_894/x" + input: "ReadVariableOp_723" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_562" + op: "AddV2" + input: "truediv_166" + input: "mul_894" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_895" + op: "Mul" + input: "PolynomialDecay" + input: "add_562" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_724" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_165" + op: "Sub" + input: "ReadVariableOp_724" + input: "mul_895" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_644" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + input: "sub_165" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_725" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + input: "^AssignVariableOp_644" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_645" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + input: "add_559" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_726" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_645" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_646" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + input: "add_560" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_727" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_646" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m" 
+ attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_896/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_896/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_896" + op: "Mul" + input: "Mul_896/x" + input: "Mul_896/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_897/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + 
tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_897" + op: "Mul" + input: "Mul_897/x" + input: "clip_by_global_norm/clip_by_global_norm/_166" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_563" + op: "AddV2" + input: "Mul_896" + input: "Mul_897" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_898/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_898/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_898" + op: "Mul" + input: "Mul_898/x" + input: "Mul_898/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_166" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_166" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_899/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_899" + op: "Mul" + input: "Mul_899/x" + input: "Square_166" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_564" + op: "AddV2" + input: "Mul_898" + input: "Mul_899" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_166" + op: "Sqrt" + input: "add_564" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_565/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_565" + op: "AddV2" + input: "Sqrt_166" + input: "add_565/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_167" + op: "RealDiv" + input: "add_563" + input: "add_565" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_900" + op: "Mul" + input: 
"PolynomialDecay" + input: "truediv_167" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_728" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_166" + op: "Sub" + input: "ReadVariableOp_728" + input: "mul_900" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_647" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + input: "sub_166" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_729" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + input: "^AssignVariableOp_647" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_648" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + input: "add_563" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_730" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_648" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_649" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + input: "add_564" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_731" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_649" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel/adam_m" + } + } + } + 
attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_901/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_901/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_901" + op: "Mul" + input: "Mul_901/x" + input: "Mul_901/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 
768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_902/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_902" + op: "Mul" + input: "Mul_902/x" + input: "clip_by_global_norm/clip_by_global_norm/_167" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_566" + op: "AddV2" + input: "Mul_901" + input: "Mul_902" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_903/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_903/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_903" + op: "Mul" + input: "Mul_903/x" + input: "Mul_903/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_167" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_167" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_904/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_904" + op: "Mul" + input: "Mul_904/x" + input: "Square_167" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_567" + op: "AddV2" + input: "Mul_903" + input: "Mul_904" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_167" + op: "Sqrt" + input: "add_567" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_568/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_568" + op: "AddV2" + 
input: "Sqrt_167" + input: "add_568/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_168" + op: "RealDiv" + input: "add_566" + input: "add_568" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_732" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_905/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_905" + op: "Mul" + input: "mul_905/x" + input: "ReadVariableOp_732" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_569" + op: "AddV2" + input: "truediv_168" + input: "mul_905" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_906" + op: "Mul" + input: "PolynomialDecay" + input: "add_569" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_733" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_167" + op: "Sub" + input: "ReadVariableOp_733" + input: "mul_906" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_650" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + input: "sub_167" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_734" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + input: "^AssignVariableOp_650" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_651" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + input: "add_566" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: 
"ReadVariableOp_735" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + input: "^AssignVariableOp_651" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_652" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + input: "add_567" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_736" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_652" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + 
list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_907/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_907/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_907" + op: "Mul" + input: "Mul_907/x" + input: "Mul_907/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_908/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_908" + op: "Mul" + input: "Mul_908/x" + input: "clip_by_global_norm/clip_by_global_norm/_168" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_570" + op: "AddV2" + input: "Mul_907" + input: "Mul_908" + attr { + key: "T" + value { + type: DT_FLOAT + } 
+ } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_909/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_909/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_909" + op: "Mul" + input: "Mul_909/x" + input: "Mul_909/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_168" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_168" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_910/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_910" + op: "Mul" + input: "Mul_910/x" + input: "Square_168" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_571" + op: "AddV2" + input: "Mul_909" + input: "Mul_910" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_168" + op: "Sqrt" + input: "add_571" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_572/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_572" + op: "AddV2" + input: "Sqrt_168" + input: "add_572/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_169" + op: "RealDiv" + input: "add_570" + input: "add_572" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_911" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_169" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_737" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } 
+ } +} +node { + name: "sub_168" + op: "Sub" + input: "ReadVariableOp_737" + input: "mul_911" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_653" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + input: "sub_168" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_738" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + input: "^AssignVariableOp_653" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_654" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + input: "add_570" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_739" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_654" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_655" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + input: "add_571" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_740" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_655" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_10/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + 
input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_912/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_912/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_912" + op: "Mul" + input: "Mul_912/x" + input: "Mul_912/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_913/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_913" + op: "Mul" + input: 
"Mul_913/x" + input: "clip_by_global_norm/clip_by_global_norm/_169" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_573" + op: "AddV2" + input: "Mul_912" + input: "Mul_913" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_914/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_914/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_914" + op: "Mul" + input: "Mul_914/x" + input: "Mul_914/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_169" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_169" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_915/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_915" + op: "Mul" + input: "Mul_915/x" + input: "Square_169" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_574" + op: "AddV2" + input: "Mul_914" + input: "Mul_915" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_169" + op: "Sqrt" + input: "add_574" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_575/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_575" + op: "AddV2" + input: "Sqrt_169" + input: "add_575/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_170" + op: "RealDiv" + input: "add_573" + input: "add_575" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list 
{ + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_741" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_916/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_916" + op: "Mul" + input: "mul_916/x" + input: "ReadVariableOp_741" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_576" + op: "AddV2" + input: "truediv_170" + input: "mul_916" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_917" + op: "Mul" + input: "PolynomialDecay" + input: "add_576" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_742" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_169" + op: "Sub" + input: "ReadVariableOp_742" + input: "mul_917" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_656" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + input: "sub_169" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_743" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + input: "^AssignVariableOp_656" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_657" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + input: "add_573" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_744" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_657" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_658" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_10/attention/self/value/kernel/adam_v" + input: "add_574" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_745" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_658" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" 
+ value { + list { + s: "loc:@bert/encoder/layer_10/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_918/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_918/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_918" + op: "Mul" + input: "Mul_918/x" + input: "Mul_918/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_919/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_919" + op: "Mul" + input: "Mul_919/x" + input: "clip_by_global_norm/clip_by_global_norm/_170" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_577" + op: "AddV2" + input: "Mul_918" + input: "Mul_919" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_920/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT 
+ tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_920/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_920" + op: "Mul" + input: "Mul_920/x" + input: "Mul_920/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_170" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_170" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_921/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_921" + op: "Mul" + input: "Mul_921/x" + input: "Square_170" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_578" + op: "AddV2" + input: "Mul_920" + input: "Mul_921" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_170" + op: "Sqrt" + input: "add_578" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_579/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_579" + op: "AddV2" + input: "Sqrt_170" + input: "add_579/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_171" + op: "RealDiv" + input: "add_577" + input: "add_579" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_922" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_171" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_746" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_170" + op: "Sub" + input: "ReadVariableOp_746" + input: "mul_922" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_659" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_10/attention/self/value/bias" + input: "sub_170" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_747" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + input: "^AssignVariableOp_659" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_660" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + input: "add_577" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_748" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_660" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_661" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + input: "add_578" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_749" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_661" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { 
+ shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + } + } + } + attr { 
+ key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_923/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_923/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_923" + op: "Mul" + input: "Mul_923/x" + input: "Mul_923/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_924/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_924" + op: "Mul" + input: "Mul_924/x" + input: "clip_by_global_norm/clip_by_global_norm/_171" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + 
name: "add_580" + op: "AddV2" + input: "Mul_923" + input: "Mul_924" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_925/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_925/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_925" + op: "Mul" + input: "Mul_925/x" + input: "Mul_925/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_171" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_171" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_926/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_926" + op: "Mul" + input: "Mul_926/x" + input: "Square_171" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_581" + op: "AddV2" + input: "Mul_925" + input: "Mul_926" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_171" + op: "Sqrt" + input: "add_581" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_582/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_582" + op: "AddV2" + input: "Sqrt_171" + input: "add_582/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_172" + op: "RealDiv" + input: "add_580" + input: "add_582" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_750" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + 
shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_927/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_927" + op: "Mul" + input: "mul_927/x" + input: "ReadVariableOp_750" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_583" + op: "AddV2" + input: "truediv_172" + input: "mul_927" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_928" + op: "Mul" + input: "PolynomialDecay" + input: "add_583" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_751" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_171" + op: "Sub" + input: "ReadVariableOp_751" + input: "mul_928" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_662" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + input: "sub_171" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_752" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + input: "^AssignVariableOp_662" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_663" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + input: "add_580" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_753" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_663" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_664" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + input: "add_581" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_754" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_664" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: 
"allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_929/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_929/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_929" + op: "Mul" + input: "Mul_929/x" + input: "Mul_929/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_930/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_930" + op: "Mul" + input: "Mul_930/x" + input: "clip_by_global_norm/clip_by_global_norm/_172" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_584" + op: "AddV2" + input: "Mul_929" + input: "Mul_930" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_931/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_931/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_10/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_931" + op: "Mul" + input: "Mul_931/x" + input: "Mul_931/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_172" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_172" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_932/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_932" + op: "Mul" + input: "Mul_932/x" + input: "Square_172" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_585" + op: "AddV2" + input: "Mul_931" + input: "Mul_932" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_172" + op: "Sqrt" + input: "add_585" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_586/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_586" + op: "AddV2" + input: "Sqrt_172" + input: "add_586/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_173" + op: "RealDiv" + input: "add_584" + input: "add_586" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_933" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_173" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_755" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_172" + op: "Sub" + input: "ReadVariableOp_755" + input: "mul_933" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_665" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + input: "sub_172" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr 
{ + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_756" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + input: "^AssignVariableOp_665" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_666" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + input: "add_584" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_757" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_666" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_667" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + input: "add_585" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_758" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_667" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_934/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + 
float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_934/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_934" + op: "Mul" + input: "Mul_934/x" + input: "Mul_934/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_935/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_935" + op: "Mul" + input: "Mul_935/x" + input: "clip_by_global_norm/clip_by_global_norm/_173" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_587" + op: "AddV2" + input: "Mul_934" + input: "Mul_935" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_936/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_936/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_936" + op: "Mul" + input: "Mul_936/x" + input: "Mul_936/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_173" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_173" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_937/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_937" + op: "Mul" + input: "Mul_937/x" + input: "Square_173" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_588" + op: "AddV2" + input: "Mul_936" + input: "Mul_937" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_173" + op: "Sqrt" + input: "add_588" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 768 + } + } + } + } + } +} +node { + name: "add_589/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_589" + op: "AddV2" + input: "Sqrt_173" + input: "add_589/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_174" + op: "RealDiv" + input: "add_587" + input: "add_589" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_938" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_174" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_759" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_173" + op: "Sub" + input: "ReadVariableOp_759" + input: "mul_938" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_668" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + input: "sub_173" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_760" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + input: "^AssignVariableOp_668" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_669" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + input: "add_587" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_761" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + input: "^AssignVariableOp_669" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_670" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + input: "add_588" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_762" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + input: "^AssignVariableOp_670" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_939/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_939/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_939" + op: "Mul" + input: "Mul_939/x" + input: "Mul_939/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_940/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_940" + op: "Mul" + input: "Mul_940/x" + input: "clip_by_global_norm/clip_by_global_norm/_174" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_590" + op: "AddV2" + input: "Mul_939" + input: "Mul_940" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_941/x" + op: "Const" + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_941/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_941" + op: "Mul" + input: "Mul_941/x" + input: "Mul_941/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_174" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_174" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_942/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_942" + op: "Mul" + input: "Mul_942/x" + input: "Square_174" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_591" + op: "AddV2" + input: "Mul_941" + input: "Mul_942" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_174" + op: "Sqrt" + input: "add_591" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_592/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_592" + op: "AddV2" + input: "Sqrt_174" + input: "add_592/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_175" + op: "RealDiv" + input: "add_590" + input: "add_592" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_943" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_175" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_763" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_174" + op: "Sub" + input: "ReadVariableOp_763" + input: "mul_943" + attr { + key: "T" + value { + type: DT_FLOAT + 
} + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_671" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + input: "sub_174" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_764" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + input: "^AssignVariableOp_671" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_672" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + input: "add_590" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_765" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + input: "^AssignVariableOp_672" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_673" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + input: "add_591" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_766" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + input: "^AssignVariableOp_673" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: 
"bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_944/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_944/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_944" + op: "Mul" + input: "Mul_944/x" + input: "Mul_944/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_945/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_945" + op: "Mul" + input: "Mul_945/x" + input: 
"clip_by_global_norm/clip_by_global_norm/_175" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_593" + op: "AddV2" + input: "Mul_944" + input: "Mul_945" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_946/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_946/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_946" + op: "Mul" + input: "Mul_946/x" + input: "Mul_946/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_175" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_175" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_947/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_947" + op: "Mul" + input: "Mul_947/x" + input: "Square_175" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_594" + op: "AddV2" + input: "Mul_946" + input: "Mul_947" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_175" + op: "Sqrt" + input: "add_594" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_595/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_595" + op: "AddV2" + input: "Sqrt_175" + input: "add_595/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_176" + op: "RealDiv" + input: "add_593" + input: "add_595" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_767" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_948/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_948" + op: "Mul" + input: "mul_948/x" + input: "ReadVariableOp_767" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_596" + op: "AddV2" + input: "truediv_176" + input: "mul_948" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_949" + op: "Mul" + input: "PolynomialDecay" + input: "add_596" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_768" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_175" + op: "Sub" + input: "ReadVariableOp_768" + input: "mul_949" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_674" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + input: "sub_175" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_769" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + input: "^AssignVariableOp_674" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_675" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + input: "add_593" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_770" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_675" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_676" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + input: "add_594" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_771" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_676" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_10/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_950/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_950/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_950" + op: "Mul" + input: "Mul_950/x" + input: "Mul_950/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_951/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_951" + op: "Mul" + input: "Mul_951/x" + input: "clip_by_global_norm/clip_by_global_norm/_176" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_597" + op: "AddV2" + input: "Mul_950" + input: "Mul_951" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_952/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_952/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_952" + op: "Mul" + input: "Mul_952/x" + input: "Mul_952/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_176" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_176" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_953/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_953" + op: "Mul" + input: "Mul_953/x" + input: "Square_176" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_598" + op: "AddV2" + input: "Mul_952" + input: "Mul_953" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_176" + op: "Sqrt" + input: "add_598" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_599/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_599" + op: "AddV2" + input: "Sqrt_176" + input: "add_599/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_177" + op: "RealDiv" + input: "add_597" + input: "add_599" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_954" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_177" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_772" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_176" + op: "Sub" + input: "ReadVariableOp_772" + input: "mul_954" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_677" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + input: "sub_176" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_773" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + input: "^AssignVariableOp_677" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_678" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + input: "add_597" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_774" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_678" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_679" + op: 
"AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + input: "add_598" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_775" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_679" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: 
true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } 
+} +node { + name: "bert/encoder/layer_10/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_955/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_955/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_955" + op: "Mul" + input: "Mul_955/x" + input: "Mul_955/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_956/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_956" + op: "Mul" + input: "Mul_956/x" + input: "clip_by_global_norm/clip_by_global_norm/_177" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_600" + op: "AddV2" + input: "Mul_955" + input: "Mul_956" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_957/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_957/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_957" + op: "Mul" + input: "Mul_957/x" + input: "Mul_957/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_177" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_177" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_958/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + 
dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_958" + op: "Mul" + input: "Mul_958/x" + input: "Square_177" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_601" + op: "AddV2" + input: "Mul_957" + input: "Mul_958" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_177" + op: "Sqrt" + input: "add_601" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_602/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_602" + op: "AddV2" + input: "Sqrt_177" + input: "add_602/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_178" + op: "RealDiv" + input: "add_600" + input: "add_602" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_776" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_959/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_959" + op: "Mul" + input: "mul_959/x" + input: "ReadVariableOp_776" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_603" + op: "AddV2" + input: "truediv_178" + input: "mul_959" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_960" + op: "Mul" + input: "PolynomialDecay" + input: "add_603" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_777" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_177" + op: "Sub" + input: "ReadVariableOp_777" + input: "mul_960" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_680" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel" + input: "sub_177" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_778" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel" + input: "^AssignVariableOp_680" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_681" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m" + input: "add_600" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_779" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m" + input: "^AssignVariableOp_681" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_682" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v" + input: "add_601" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_780" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v" + input: "^AssignVariableOp_682" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m" + input: "bert/encoder/layer_10/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v" + input: "bert/encoder/layer_10/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_961/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_961/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { 
+ key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_961" + op: "Mul" + input: "Mul_961/x" + input: "Mul_961/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_962/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_962" + op: "Mul" + input: "Mul_962/x" + input: "clip_by_global_norm/clip_by_global_norm/_178" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_604" + op: "AddV2" + input: "Mul_961" + input: "Mul_962" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_963/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_963/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_963" + op: "Mul" + input: "Mul_963/x" + input: "Mul_963/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_178" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_178" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_964/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_964" + op: "Mul" + input: "Mul_964/x" + input: "Square_178" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_605" + op: "AddV2" + input: "Mul_963" + input: "Mul_964" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_178" + op: "Sqrt" + input: "add_605" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_606/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + 
} + } +} +node { + name: "add_606" + op: "AddV2" + input: "Sqrt_178" + input: "add_606/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_179" + op: "RealDiv" + input: "add_604" + input: "add_606" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_965" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_179" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_781" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_178" + op: "Sub" + input: "ReadVariableOp_781" + input: "mul_965" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_683" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias" + input: "sub_178" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_782" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias" + input: "^AssignVariableOp_683" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_684" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m" + input: "add_604" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_783" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m" + input: "^AssignVariableOp_684" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_685" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v" + input: "add_605" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_784" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v" + input: "^AssignVariableOp_685" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 
+ } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_966/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_966/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_966" + op: "Mul" + input: "Mul_966/x" + input: "Mul_966/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_967/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_967" + op: "Mul" + input: "Mul_967/x" + input: "clip_by_global_norm/clip_by_global_norm/_179" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_607" + op: "AddV2" + input: "Mul_966" + input: "Mul_967" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_968/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_968/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_968" + op: "Mul" + input: "Mul_968/x" + input: "Mul_968/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_179" + op: "Square" + input: 
"clip_by_global_norm/clip_by_global_norm/_179" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_969/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_969" + op: "Mul" + input: "Mul_969/x" + input: "Square_179" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_608" + op: "AddV2" + input: "Mul_968" + input: "Mul_969" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_179" + op: "Sqrt" + input: "add_608" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_609/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_609" + op: "AddV2" + input: "Sqrt_179" + input: "add_609/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_180" + op: "RealDiv" + input: "add_607" + input: "add_609" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_970" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_180" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_785" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_179" + op: "Sub" + input: "ReadVariableOp_785" + input: "mul_970" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_686" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + input: "sub_179" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_786" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + input: "^AssignVariableOp_686" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_687" + op: "AssignVariableOp" + 
input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + input: "add_607" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_787" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + input: "^AssignVariableOp_687" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_688" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + input: "add_608" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_788" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + input: "^AssignVariableOp_688" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_971/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_971/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_971" + op: "Mul" + input: "Mul_971/x" + input: "Mul_971/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_972/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_972" + op: "Mul" + input: "Mul_972/x" + input: "clip_by_global_norm/clip_by_global_norm/_180" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_610" + op: "AddV2" + input: "Mul_971" + input: "Mul_972" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_973/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_973/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_973" + op: "Mul" + input: "Mul_973/x" + input: "Mul_973/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_180" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_180" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_974/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_974" + op: "Mul" + input: "Mul_974/x" + input: "Square_180" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_611" + op: "AddV2" + input: "Mul_973" + input: "Mul_974" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_180" + op: "Sqrt" + input: "add_611" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_612/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_612" + op: "AddV2" + input: "Sqrt_180" + input: "add_612/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_181" + op: "RealDiv" + input: "add_610" + input: "add_612" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + 
list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_975" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_181" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_789" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_180" + op: "Sub" + input: "ReadVariableOp_789" + input: "mul_975" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_689" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + input: "sub_180" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_790" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + input: "^AssignVariableOp_689" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_690" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + input: "add_610" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_791" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + input: "^AssignVariableOp_690" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_691" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + input: "add_611" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_792" + op: "ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + input: "^AssignVariableOp_691" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: 
"bert/encoder/layer_11/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { 
+ name: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_976/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_976/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + 
key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_976" + op: "Mul" + input: "Mul_976/x" + input: "Mul_976/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_977/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_977" + op: "Mul" + input: "Mul_977/x" + input: "clip_by_global_norm/clip_by_global_norm/_181" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_613" + op: "AddV2" + input: "Mul_976" + input: "Mul_977" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_978/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_978/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_978" + op: "Mul" + input: "Mul_978/x" + input: "Mul_978/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_181" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_181" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_979/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_979" + op: "Mul" + input: "Mul_979/x" + input: "Square_181" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_614" + op: "AddV2" + input: "Mul_978" + input: "Mul_979" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_181" + op: "Sqrt" + input: "add_614" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_615/y" + op: "Const" + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_615" + op: "AddV2" + input: "Sqrt_181" + input: "add_615/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_182" + op: "RealDiv" + input: "add_613" + input: "add_615" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_793" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_980/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_980" + op: "Mul" + input: "mul_980/x" + input: "ReadVariableOp_793" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_616" + op: "AddV2" + input: "truediv_182" + input: "mul_980" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_981" + op: "Mul" + input: "PolynomialDecay" + input: "add_616" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_794" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_181" + op: "Sub" + input: "ReadVariableOp_794" + input: "mul_981" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_692" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + input: "sub_181" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_795" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + input: "^AssignVariableOp_692" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_693" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_11/attention/self/query/kernel/adam_m" + input: "add_613" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_796" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + input: "^AssignVariableOp_693" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_694" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + input: "add_614" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_797" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + input: "^AssignVariableOp_694" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr 
{ + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/query/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/query/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_982/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_982/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_982" + op: "Mul" + input: "Mul_982/x" + input: "Mul_982/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_983/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_983" + op: "Mul" + input: 
"Mul_983/x" + input: "clip_by_global_norm/clip_by_global_norm/_182" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_617" + op: "AddV2" + input: "Mul_982" + input: "Mul_983" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_984/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_984/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_984" + op: "Mul" + input: "Mul_984/x" + input: "Mul_984/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_182" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_182" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_985/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_985" + op: "Mul" + input: "Mul_985/x" + input: "Square_182" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_618" + op: "AddV2" + input: "Mul_984" + input: "Mul_985" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_182" + op: "Sqrt" + input: "add_618" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_619/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_619" + op: "AddV2" + input: "Sqrt_182" + input: "add_619/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_183" + op: "RealDiv" + input: "add_617" + input: "add_619" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_986" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_183" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_798" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_182" + op: "Sub" + input: "ReadVariableOp_798" + input: "mul_986" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_695" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + input: "sub_182" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_799" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + input: "^AssignVariableOp_695" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_696" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + input: "add_617" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_800" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + input: "^AssignVariableOp_696" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_697" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + input: "add_618" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_801" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + input: "^AssignVariableOp_697" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + 
attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_987/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_987/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_987" + op: "Mul" + input: "Mul_987/x" + input: "Mul_987/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_988/x" + op: "Const" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_988" + op: "Mul" + input: "Mul_988/x" + input: "clip_by_global_norm/clip_by_global_norm/_183" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_620" + op: "AddV2" + input: "Mul_987" + input: "Mul_988" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_989/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_989/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_989" + op: "Mul" + input: "Mul_989/x" + input: "Mul_989/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_183" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_183" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_990/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_990" + op: "Mul" + input: "Mul_990/x" + input: "Square_183" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_621" + op: "AddV2" + input: "Mul_989" + input: "Mul_990" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_183" + op: "Sqrt" + input: "add_621" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_622/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_622" + op: "AddV2" + input: "Sqrt_183" + input: "add_622/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_184" + op: "RealDiv" + input: "add_620" + input: "add_622" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_802" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_991/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_991" + op: "Mul" + input: "mul_991/x" + input: "ReadVariableOp_802" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_623" + op: "AddV2" + input: "truediv_184" + input: "mul_991" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_992" + op: "Mul" + input: "PolynomialDecay" + input: "add_623" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_803" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_183" + op: "Sub" + input: "ReadVariableOp_803" + input: "mul_992" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_698" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + input: "sub_183" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_804" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + input: "^AssignVariableOp_698" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_699" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + input: "add_620" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_805" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + input: 
"^AssignVariableOp_699" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_700" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + input: "add_621" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_806" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + input: "^AssignVariableOp_700" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + 
attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/key/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/key/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_993/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_993/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_993" + op: "Mul" + input: "Mul_993/x" + input: "Mul_993/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_994/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_994" + op: "Mul" + input: "Mul_994/x" + input: "clip_by_global_norm/clip_by_global_norm/_184" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_624" + op: "AddV2" + input: "Mul_993" + input: "Mul_994" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { 
+ name: "Mul_995/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_995/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_995" + op: "Mul" + input: "Mul_995/x" + input: "Mul_995/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_184" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_184" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_996/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_996" + op: "Mul" + input: "Mul_996/x" + input: "Square_184" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_625" + op: "AddV2" + input: "Mul_995" + input: "Mul_996" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_184" + op: "Sqrt" + input: "add_625" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_626/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_626" + op: "AddV2" + input: "Sqrt_184" + input: "add_626/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_185" + op: "RealDiv" + input: "add_624" + input: "add_626" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_997" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_185" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_807" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_184" + op: "Sub" + input: "ReadVariableOp_807" + input: "mul_997" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_701" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + input: "sub_184" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_808" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + input: "^AssignVariableOp_701" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_702" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + input: "add_624" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_809" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + input: "^AssignVariableOp_702" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_703" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + input: "add_625" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_810" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + input: "^AssignVariableOp_703" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + 
} + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_998/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_998/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_998" + op: "Mul" + input: "Mul_998/x" + input: "Mul_998/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_999/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_999" + op: "Mul" + input: "Mul_999/x" + input: "clip_by_global_norm/clip_by_global_norm/_185" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + 
key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_627" + op: "AddV2" + input: "Mul_998" + input: "Mul_999" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1000/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1000/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1000" + op: "Mul" + input: "Mul_1000/x" + input: "Mul_1000/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_185" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_185" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1001/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1001" + op: "Mul" + input: "Mul_1001/x" + input: "Square_185" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_628" + op: "AddV2" + input: "Mul_1000" + input: "Mul_1001" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_185" + op: "Sqrt" + input: "add_628" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_629/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_629" + op: "AddV2" + input: "Sqrt_185" + input: "add_629/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_186" + op: "RealDiv" + input: "add_627" + input: "add_629" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_811" + op: 
"ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_1002/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_1002" + op: "Mul" + input: "mul_1002/x" + input: "ReadVariableOp_811" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_630" + op: "AddV2" + input: "truediv_186" + input: "mul_1002" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1003" + op: "Mul" + input: "PolynomialDecay" + input: "add_630" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_812" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_185" + op: "Sub" + input: "ReadVariableOp_812" + input: "mul_1003" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_704" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + input: "sub_185" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_813" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + input: "^AssignVariableOp_704" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_705" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + input: "add_627" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_814" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + input: "^AssignVariableOp_705" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_706" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + input: "add_628" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_815" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + input: "^AssignVariableOp_706" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/self/value/bias/adam_v" + } + } 
+ } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/self/value/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1004/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1004/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1004" + op: "Mul" + input: "Mul_1004/x" + input: "Mul_1004/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1005/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1005" + op: "Mul" + input: "Mul_1005/x" + input: "clip_by_global_norm/clip_by_global_norm/_186" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_631" + op: "AddV2" + input: "Mul_1004" + input: "Mul_1005" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1006/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: 
"Mul_1006/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1006" + op: "Mul" + input: "Mul_1006/x" + input: "Mul_1006/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_186" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_186" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1007/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1007" + op: "Mul" + input: "Mul_1007/x" + input: "Square_186" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_632" + op: "AddV2" + input: "Mul_1006" + input: "Mul_1007" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_186" + op: "Sqrt" + input: "add_632" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_633/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_633" + op: "AddV2" + input: "Sqrt_186" + input: "add_633/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_187" + op: "RealDiv" + input: "add_631" + input: "add_633" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1008" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_187" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_816" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_186" + op: "Sub" + input: "ReadVariableOp_816" + input: "mul_1008" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_707" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + input: "sub_186" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_817" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + input: "^AssignVariableOp_707" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_708" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + input: "add_631" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_818" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + input: "^AssignVariableOp_708" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_709" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + input: "add_632" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_819" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + input: "^AssignVariableOp_709" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + 
key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { 
+ size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1009/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1009/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1009" + op: "Mul" + input: "Mul_1009/x" + input: "Mul_1009/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1010/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1010" + op: "Mul" + input: "Mul_1010/x" + input: "clip_by_global_norm/clip_by_global_norm/_187" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_634" + op: "AddV2" + input: "Mul_1009" + input: "Mul_1010" + attr { + 
key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1011/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1011/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1011" + op: "Mul" + input: "Mul_1011/x" + input: "Mul_1011/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_187" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_187" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1012/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1012" + op: "Mul" + input: "Mul_1012/x" + input: "Square_187" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_635" + op: "AddV2" + input: "Mul_1011" + input: "Mul_1012" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_187" + op: "Sqrt" + input: "add_635" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_636/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_636" + op: "AddV2" + input: "Sqrt_187" + input: "add_636/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_188" + op: "RealDiv" + input: "add_634" + input: "add_636" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_820" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } 
+ attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_1013/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_1013" + op: "Mul" + input: "mul_1013/x" + input: "ReadVariableOp_820" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_637" + op: "AddV2" + input: "truediv_188" + input: "mul_1013" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1014" + op: "Mul" + input: "PolynomialDecay" + input: "add_637" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_821" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_187" + op: "Sub" + input: "ReadVariableOp_821" + input: "mul_1014" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_710" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + input: "sub_187" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_822" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + input: "^AssignVariableOp_710" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_711" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + input: "add_634" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_823" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + input: "^AssignVariableOp_711" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_712" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + input: "add_635" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: 
"ReadVariableOp_824" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + input: "^AssignVariableOp_712" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: 
"container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1015/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1015/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1015" + op: "Mul" + input: "Mul_1015/x" + input: "Mul_1015/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1016/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1016" + op: "Mul" + input: "Mul_1016/x" + input: "clip_by_global_norm/clip_by_global_norm/_188" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_638" + op: "AddV2" + input: "Mul_1015" + input: "Mul_1016" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1017/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1017/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1017" + op: "Mul" + input: "Mul_1017/x" + input: "Mul_1017/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_188" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_188" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1018/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1018" + op: "Mul" + input: "Mul_1018/x" + input: "Square_188" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_639" + op: "AddV2" + input: "Mul_1017" + input: "Mul_1018" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_188" + op: "Sqrt" + input: "add_639" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_640/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_640" + op: "AddV2" + input: "Sqrt_188" + input: "add_640/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_189" + op: "RealDiv" + input: "add_638" + input: "add_640" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1019" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_189" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_825" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_188" + op: "Sub" + input: "ReadVariableOp_825" + input: "mul_1019" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_713" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + input: "sub_188" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + 
key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_826" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + input: "^AssignVariableOp_713" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_714" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + input: "add_638" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_827" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + input: "^AssignVariableOp_714" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_715" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + input: "add_639" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_828" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + input: "^AssignVariableOp_715" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + 
input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1020/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: 
"Mul_1020/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1020" + op: "Mul" + input: "Mul_1020/x" + input: "Mul_1020/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1021/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1021" + op: "Mul" + input: "Mul_1021/x" + input: "clip_by_global_norm/clip_by_global_norm/_189" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_641" + op: "AddV2" + input: "Mul_1020" + input: "Mul_1021" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1022/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1022/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1022" + op: "Mul" + input: "Mul_1022/x" + input: "Mul_1022/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_189" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_189" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1023/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1023" + op: "Mul" + input: "Mul_1023/x" + input: "Square_189" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_642" + op: "AddV2" + input: "Mul_1022" + input: "Mul_1023" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_189" + op: "Sqrt" + input: "add_642" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + 
name: "add_643/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_643" + op: "AddV2" + input: "Sqrt_189" + input: "add_643/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_190" + op: "RealDiv" + input: "add_641" + input: "add_643" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1024" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_190" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_829" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_189" + op: "Sub" + input: "ReadVariableOp_829" + input: "mul_1024" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_716" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + input: "sub_189" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_830" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + input: "^AssignVariableOp_716" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_717" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + input: "add_641" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_831" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + input: "^AssignVariableOp_717" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_718" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + input: "add_642" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_832" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + input: "^AssignVariableOp_718" + 
attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } 
+ } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1025/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1025/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1025" + op: "Mul" + input: "Mul_1025/x" + input: "Mul_1025/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1026/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1026" + op: "Mul" + input: "Mul_1026/x" + input: "clip_by_global_norm/clip_by_global_norm/_190" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_644" + op: "AddV2" + input: "Mul_1025" + input: "Mul_1026" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1027/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { 
+ dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1027/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1027" + op: "Mul" + input: "Mul_1027/x" + input: "Mul_1027/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_190" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_190" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1028/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1028" + op: "Mul" + input: "Mul_1028/x" + input: "Square_190" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_645" + op: "AddV2" + input: "Mul_1027" + input: "Mul_1028" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_190" + op: "Sqrt" + input: "add_645" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_646/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_646" + op: "AddV2" + input: "Sqrt_190" + input: "add_646/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_191" + op: "RealDiv" + input: "add_644" + input: "add_646" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1029" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_191" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_833" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_190" + op: "Sub" + input: "ReadVariableOp_833" + input: "mul_1029" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"AssignVariableOp_719" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + input: "sub_190" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_834" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + input: "^AssignVariableOp_719" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_720" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + input: "add_644" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_835" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + input: "^AssignVariableOp_720" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_721" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + input: "add_645" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_836" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + input: "^AssignVariableOp_721" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { 
+ list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\003\000\000\000\014\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: 
"loc:@bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1030/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1030/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1030" + op: "Mul" + input: "Mul_1030/x" + input: "Mul_1030/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_1031/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1031" + op: "Mul" + input: "Mul_1031/x" + input: "clip_by_global_norm/clip_by_global_norm/_191" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim 
{ + size: 3072 + } + } + } + } + } +} +node { + name: "add_647" + op: "AddV2" + input: "Mul_1030" + input: "Mul_1031" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_1032/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1032/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1032" + op: "Mul" + input: "Mul_1032/x" + input: "Mul_1032/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_191" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_191" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_1033/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1033" + op: "Mul" + input: "Mul_1033/x" + input: "Square_191" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_648" + op: "AddV2" + input: "Mul_1032" + input: "Mul_1033" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_191" + op: "Sqrt" + input: "add_648" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_649/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_649" + op: "AddV2" + input: "Sqrt_191" + input: "add_649/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_192" + op: "RealDiv" + input: "add_647" + input: "add_649" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_837" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_11/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_1034/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_1034" + op: "Mul" + input: "mul_1034/x" + input: "ReadVariableOp_837" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_650" + op: "AddV2" + input: "truediv_192" + input: "mul_1034" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_1035" + op: "Mul" + input: "PolynomialDecay" + input: "add_650" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_838" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_191" + op: "Sub" + input: "ReadVariableOp_838" + input: "mul_1035" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_722" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + input: "sub_191" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_839" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + input: "^AssignVariableOp_722" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_723" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + input: "add_647" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_840" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + input: "^AssignVariableOp_723" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_724" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + input: "add_648" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } 
+ } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_841" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + input: "^AssignVariableOp_724" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_11/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 3072 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/intermediate/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + attr { 
+ key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1036/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1036/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1036" + op: "Mul" + input: "Mul_1036/x" + input: "Mul_1036/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_1037/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1037" + op: "Mul" + input: "Mul_1037/x" + input: "clip_by_global_norm/clip_by_global_norm/_192" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_651" + op: "AddV2" + input: "Mul_1036" + input: "Mul_1037" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_1038/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1038/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1038" + op: "Mul" + input: "Mul_1038/x" + input: "Mul_1038/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Square_192" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_192" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Mul_1039/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1039" + op: "Mul" + input: "Mul_1039/x" + input: "Square_192" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: 
"add_652" + op: "AddV2" + input: "Mul_1038" + input: "Mul_1039" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "Sqrt_192" + op: "Sqrt" + input: "add_652" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "add_653/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_653" + op: "AddV2" + input: "Sqrt_192" + input: "add_653/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "truediv_193" + op: "RealDiv" + input: "add_651" + input: "add_653" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "mul_1040" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_193" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "ReadVariableOp_842" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_192" + op: "Sub" + input: "ReadVariableOp_842" + input: "mul_1040" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } +} +node { + name: "AssignVariableOp_725" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + input: "sub_192" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_843" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + input: "^AssignVariableOp_725" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_726" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + input: "add_651" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_844" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + input: "^AssignVariableOp_726" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_727" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + input: "add_652" + attr { + key: "_has_manual_control_dependencies" + 
value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_845" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + input: "^AssignVariableOp_727" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_m/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/dense/kernel/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"bert/encoder/layer_11/output/dense/kernel/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\000\014\000\000\000\003\000\000" + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_v/Initializer/zeros" + op: "Fill" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v/Initializer/zeros/shape_as_tensor" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/kernel/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/dense/kernel/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/kernel/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: 
"bert/encoder/layer_11/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1041/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1041/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1041" + op: "Mul" + input: "Mul_1041/x" + input: "Mul_1041/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1042/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1042" + op: "Mul" + input: "Mul_1042/x" + input: "clip_by_global_norm/clip_by_global_norm/_193" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_654" + op: "AddV2" + input: "Mul_1041" + input: "Mul_1042" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1043/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1043/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1043" + op: "Mul" + input: "Mul_1043/x" + input: "Mul_1043/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_193" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_193" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1044/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: 
"Mul_1044" + op: "Mul" + input: "Mul_1044/x" + input: "Square_193" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_655" + op: "AddV2" + input: "Mul_1043" + input: "Mul_1044" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_193" + op: "Sqrt" + input: "add_655" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_656/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_656" + op: "AddV2" + input: "Sqrt_193" + input: "add_656/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_194" + op: "RealDiv" + input: "add_654" + input: "add_656" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_846" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_1045/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_1045" + op: "Mul" + input: "mul_1045/x" + input: "ReadVariableOp_846" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_657" + op: "AddV2" + input: "truediv_194" + input: "mul_1045" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1046" + op: "Mul" + input: "PolynomialDecay" + input: "add_657" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_847" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_193" + op: "Sub" + input: "ReadVariableOp_847" + input: "mul_1046" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 3072 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_728" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel" + input: "sub_193" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_848" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel" + input: "^AssignVariableOp_728" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_729" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m" + input: "add_654" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_849" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m" + input: "^AssignVariableOp_729" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_730" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v" + input: "add_655" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_850" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v" + input: "^AssignVariableOp_730" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 3072 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/dense/bias/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"bert/encoder/layer_11/output/dense/bias/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m" + input: "bert/encoder/layer_11/output/dense/bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/dense/bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/dense/bias/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v" + input: "bert/encoder/layer_11/output/dense/bias/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/dense/bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1047/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1047/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1047" 
+ op: "Mul" + input: "Mul_1047/x" + input: "Mul_1047/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1048/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1048" + op: "Mul" + input: "Mul_1048/x" + input: "clip_by_global_norm/clip_by_global_norm/_194" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_658" + op: "AddV2" + input: "Mul_1047" + input: "Mul_1048" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1049/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1049/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1049" + op: "Mul" + input: "Mul_1049/x" + input: "Mul_1049/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_194" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_194" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1050/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1050" + op: "Mul" + input: "Mul_1050/x" + input: "Square_194" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_659" + op: "AddV2" + input: "Mul_1049" + input: "Mul_1050" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_194" + op: "Sqrt" + input: "add_659" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_660/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_660" + op: "AddV2" + input: 
"Sqrt_194" + input: "add_660/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_195" + op: "RealDiv" + input: "add_658" + input: "add_660" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1051" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_195" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_851" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_194" + op: "Sub" + input: "ReadVariableOp_851" + input: "mul_1051" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_731" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/bias" + input: "sub_194" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_852" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias" + input: "^AssignVariableOp_731" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_732" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m" + input: "add_658" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_853" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m" + input: "^AssignVariableOp_732" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_733" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v" + input: "add_659" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_854" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v" + input: "^AssignVariableOp_733" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } 
+} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1052/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1052/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1052" + op: "Mul" + input: "Mul_1052/x" + input: "Mul_1052/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1053/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1053" + op: "Mul" + input: "Mul_1053/x" + input: "clip_by_global_norm/clip_by_global_norm/_195" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_661" + op: "AddV2" + input: "Mul_1052" + input: "Mul_1053" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1054/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1054/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1054" + op: "Mul" + input: "Mul_1054/x" + input: "Mul_1054/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_195" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_195" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1055/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1055" + op: "Mul" + input: "Mul_1055/x" + input: "Square_195" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_662" + op: "AddV2" + input: "Mul_1054" + input: "Mul_1055" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_195" + op: "Sqrt" + input: "add_662" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_663/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_663" + op: "AddV2" + input: "Sqrt_195" + input: "add_663/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_196" + op: "RealDiv" + input: "add_661" + input: "add_663" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1056" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_196" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_855" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_195" + op: "Sub" + input: "ReadVariableOp_855" + input: "mul_1056" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_734" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + input: "sub_195" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_856" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + input: "^AssignVariableOp_734" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_735" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + input: "add_661" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_857" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + input: "^AssignVariableOp_735" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_736" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + input: "add_662" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_858" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + input: "^AssignVariableOp_736" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + attr { + key: 
"_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 768 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v/Assign" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1057/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1057/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1057" + op: "Mul" + input: "Mul_1057/x" + input: "Mul_1057/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1058/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1058" + op: "Mul" + input: "Mul_1058/x" + input: "clip_by_global_norm/clip_by_global_norm/_196" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_664" + op: "AddV2" + input: "Mul_1057" + input: "Mul_1058" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1059/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1059/ReadVariableOp" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1059" + op: "Mul" + input: "Mul_1059/x" + input: "Mul_1059/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_196" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_196" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1060/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1060" + op: "Mul" + input: "Mul_1060/x" + input: "Square_196" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_665" + op: "AddV2" + input: "Mul_1059" + input: "Mul_1060" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_196" + op: "Sqrt" + input: "add_665" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_666/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_666" + op: "AddV2" + input: "Sqrt_196" + input: "add_666/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_197" + op: "RealDiv" + input: "add_664" + input: "add_666" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" 
+ value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1061" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_197" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_859" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_196" + op: "Sub" + input: "ReadVariableOp_859" + input: "mul_1061" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_737" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + input: "sub_196" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_860" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + input: "^AssignVariableOp_737" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_738" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + input: "add_664" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_861" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + input: "^AssignVariableOp_738" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_739" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + input: "add_665" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_862" + op: "ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + input: "^AssignVariableOp_739" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "cls/squad/output_weights/adam_m/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\002\000\000\000\000\003\000\000" + } + } + } +} +node { + name: "cls/squad/output_weights/adam_m/Initializer/zeros/Const" + op: "Const" + attr { + 
key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "cls/squad/output_weights/adam_m/Initializer/zeros" + op: "Fill" + input: "cls/squad/output_weights/adam_m/Initializer/zeros/shape_as_tensor" + input: "cls/squad/output_weights/adam_m/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "cls/squad/output_weights/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "cls/squad/output_weights/adam_m" + } + } +} +node { + name: "cls/squad/output_weights/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "cls/squad/output_weights/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "cls/squad/output_weights/adam_m/Assign" + op: "AssignVariableOp" + input: "cls/squad/output_weights/adam_m" + input: "cls/squad/output_weights/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "cls/squad/output_weights/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_weights/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "cls/squad/output_weights/adam_v/Initializer/zeros/shape_as_tensor" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\002\000\000\000\000\003\000\000" + } + } + } +} +node { + name: "cls/squad/output_weights/adam_v/Initializer/zeros/Const" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0 + } + } + } +} +node { + name: "cls/squad/output_weights/adam_v/Initializer/zeros" + op: "Fill" + input: 
"cls/squad/output_weights/adam_v/Initializer/zeros/shape_as_tensor" + input: "cls/squad/output_weights/adam_v/Initializer/zeros/Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "index_type" + value { + type: DT_INT32 + } + } +} +node { + name: "cls/squad/output_weights/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_weights/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + attr { + key: "shared_name" + value { + s: "cls/squad/output_weights/adam_v" + } + } +} +node { + name: "cls/squad/output_weights/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "cls/squad/output_weights/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "cls/squad/output_weights/adam_v/Assign" + op: "AssignVariableOp" + input: "cls/squad/output_weights/adam_v" + input: "cls/squad/output_weights/adam_v/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "cls/squad/output_weights/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_weights/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1062/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1062/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_weights/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1062" + op: "Mul" + input: "Mul_1062/x" + input: "Mul_1062/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1063/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1063" + op: "Mul" + input: "Mul_1063/x" + input: "clip_by_global_norm/clip_by_global_norm/_199" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: 
"add_667" + op: "AddV2" + input: "Mul_1062" + input: "Mul_1063" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1064/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1064/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_weights/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1064" + op: "Mul" + input: "Mul_1064/x" + input: "Mul_1064/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Square_197" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_199" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Mul_1065/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + name: "Mul_1065" + op: "Mul" + input: "Mul_1065/x" + input: "Square_197" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_668" + op: "AddV2" + input: "Mul_1064" + input: "Mul_1065" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "Sqrt_197" + op: "Sqrt" + input: "add_668" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_669/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_669" + op: "AddV2" + input: "Sqrt_197" + input: "add_669/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "truediv_198" + op: "RealDiv" + input: "add_667" + input: "add_669" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_863" + op: "ReadVariableOp" + input: "cls/squad/output_weights" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } 
+ attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "mul_1066/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.009999999776482582 + } + } + } +} +node { + name: "mul_1066" + op: "Mul" + input: "mul_1066/x" + input: "ReadVariableOp_863" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "add_670" + op: "AddV2" + input: "truediv_198" + input: "mul_1066" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "mul_1067" + op: "Mul" + input: "PolynomialDecay" + input: "add_670" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "ReadVariableOp_864" + op: "ReadVariableOp" + input: "cls/squad/output_weights" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_197" + op: "Sub" + input: "ReadVariableOp_864" + input: "mul_1067" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } +} +node { + name: "AssignVariableOp_740" + op: "AssignVariableOp" + input: "cls/squad/output_weights" + input: "sub_197" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_865" + op: "ReadVariableOp" + input: "cls/squad/output_weights" + input: "^AssignVariableOp_740" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_741" + op: "AssignVariableOp" + input: "cls/squad/output_weights/adam_m" + input: "add_667" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_866" + op: "ReadVariableOp" + input: "cls/squad/output_weights/adam_m" + input: "^AssignVariableOp_741" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_742" + op: "AssignVariableOp" + input: "cls/squad/output_weights/adam_v" + input: "add_668" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_867" + op: "ReadVariableOp" + input: "cls/squad/output_weights/adam_v" + input: "^AssignVariableOp_742" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + 
size: 2 + } + dim { + size: 768 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "cls/squad/output_bias/adam_m/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 2 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "cls/squad/output_bias/adam_m" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_bias/adam_m" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 2 + } + } + } + } + attr { + key: "shared_name" + value { + s: "cls/squad/output_bias/adam_m" + } + } +} +node { + name: "cls/squad/output_bias/adam_m/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "cls/squad/output_bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "cls/squad/output_bias/adam_m/Assign" + op: "AssignVariableOp" + input: "cls/squad/output_bias/adam_m" + input: "cls/squad/output_bias/adam_m/Initializer/zeros" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "cls/squad/output_bias/adam_m/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "cls/squad/output_bias/adam_v/Initializer/zeros" + op: "Const" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 2 + } + } + float_val: 0.0 + } + } + } +} +node { + name: "cls/squad/output_bias/adam_v" + op: "VarHandleOp" + attr { + key: "_class" + value { + list { + s: "loc:@cls/squad/output_bias/adam_v" + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "allowed_devices" + value { + list { + } + } + } + attr { + key: "container" + value { + s: "" + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 2 + } + } + } + } + attr { + key: "shared_name" + value { + s: "cls/squad/output_bias/adam_v" + } + } +} +node { + name: "cls/squad/output_bias/adam_v/IsInitialized/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "cls/squad/output_bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "cls/squad/output_bias/adam_v/Assign" + op: "AssignVariableOp" + input: "cls/squad/output_bias/adam_v" + input: "cls/squad/output_bias/adam_v/Initializer/zeros" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "cls/squad/output_bias/adam_v/Read/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1068/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.8999999761581421 + } + } + } +} +node { + name: "Mul_1068/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1068" + op: "Mul" + input: "Mul_1068/x" + input: "Mul_1068/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "Mul_1069/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.10000000149011612 + } + } + } +} +node { + name: "Mul_1069" + op: "Mul" + input: "Mul_1069/x" + input: "clip_by_global_norm/clip_by_global_norm/_200" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "add_671" + op: "AddV2" + input: "Mul_1068" + input: "Mul_1069" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "Mul_1070/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.9990000128746033 + } + } + } +} +node { + name: "Mul_1070/ReadVariableOp" + op: "ReadVariableOp" + input: "cls/squad/output_bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "Mul_1070" + op: "Mul" + input: "Mul_1070/x" + input: "Mul_1070/ReadVariableOp" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "Square_198" + op: "Square" + input: "clip_by_global_norm/clip_by_global_norm/_200" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "Mul_1071/x" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 0.0010000000474974513 + } + } + } +} +node { + 
name: "Mul_1071" + op: "Mul" + input: "Mul_1071/x" + input: "Square_198" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "add_672" + op: "AddV2" + input: "Mul_1070" + input: "Mul_1071" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "Sqrt_198" + op: "Sqrt" + input: "add_672" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "add_673/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 9.999999974752427e-07 + } + } + } +} +node { + name: "add_673" + op: "AddV2" + input: "Sqrt_198" + input: "add_673/y" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "truediv_199" + op: "RealDiv" + input: "add_671" + input: "add_673" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "mul_1072" + op: "Mul" + input: "PolynomialDecay" + input: "truediv_199" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "ReadVariableOp_868" + op: "ReadVariableOp" + input: "cls/squad/output_bias" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "sub_198" + op: "Sub" + input: "ReadVariableOp_868" + input: "mul_1072" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } +} +node { + name: "AssignVariableOp_743" + op: "AssignVariableOp" + input: "cls/squad/output_bias" + input: "sub_198" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_869" + op: "ReadVariableOp" + input: "cls/squad/output_bias" + input: "^AssignVariableOp_743" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_744" + op: "AssignVariableOp" + input: "cls/squad/output_bias/adam_m" + input: "add_671" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_870" + op: "ReadVariableOp" + input: "cls/squad/output_bias/adam_m" + input: "^AssignVariableOp_744" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "AssignVariableOp_745" + op: "AssignVariableOp" + input: 
"cls/squad/output_bias/adam_v" + input: "add_672" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_871" + op: "ReadVariableOp" + input: "cls/squad/output_bias/adam_v" + input: "^AssignVariableOp_745" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 2 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } +} +node { + name: "group_deps" + op: "NoOp" + input: "^AssignVariableOp_149" + input: "^AssignVariableOp_150" + input: "^AssignVariableOp_151" + input: "^AssignVariableOp_152" + input: "^AssignVariableOp_153" + input: "^AssignVariableOp_154" + input: "^AssignVariableOp_155" + input: "^AssignVariableOp_156" + input: "^AssignVariableOp_157" + input: "^AssignVariableOp_158" + input: "^AssignVariableOp_159" + input: "^AssignVariableOp_160" + input: "^AssignVariableOp_161" + input: "^AssignVariableOp_162" + input: "^AssignVariableOp_163" + input: "^AssignVariableOp_164" + input: "^AssignVariableOp_165" + input: "^AssignVariableOp_166" + input: "^AssignVariableOp_167" + input: "^AssignVariableOp_168" + input: "^AssignVariableOp_169" + input: "^AssignVariableOp_170" + input: "^AssignVariableOp_171" + input: "^AssignVariableOp_172" + input: "^AssignVariableOp_173" + input: "^AssignVariableOp_174" + input: "^AssignVariableOp_175" + input: "^AssignVariableOp_176" + input: "^AssignVariableOp_177" + input: "^AssignVariableOp_178" + input: "^AssignVariableOp_179" + input: "^AssignVariableOp_180" + input: "^AssignVariableOp_181" + input: "^AssignVariableOp_182" + input: "^AssignVariableOp_183" + input: "^AssignVariableOp_184" + input: "^AssignVariableOp_185" + input: "^AssignVariableOp_186" + input: "^AssignVariableOp_187" + input: "^AssignVariableOp_188" + input: "^AssignVariableOp_189" + input: "^AssignVariableOp_190" + input: "^AssignVariableOp_191" + input: "^AssignVariableOp_192" + input: "^AssignVariableOp_193" + input: "^AssignVariableOp_194" + input: "^AssignVariableOp_195" + input: "^AssignVariableOp_196" + input: "^AssignVariableOp_197" + input: "^AssignVariableOp_198" + input: "^AssignVariableOp_199" + input: "^AssignVariableOp_200" + input: "^AssignVariableOp_201" + input: "^AssignVariableOp_202" + input: "^AssignVariableOp_203" + input: "^AssignVariableOp_204" + input: "^AssignVariableOp_205" + input: "^AssignVariableOp_206" + input: "^AssignVariableOp_207" + input: "^AssignVariableOp_208" + input: "^AssignVariableOp_209" + input: "^AssignVariableOp_210" + input: "^AssignVariableOp_211" + input: "^AssignVariableOp_212" + input: "^AssignVariableOp_213" + input: "^AssignVariableOp_214" + input: "^AssignVariableOp_215" + input: "^AssignVariableOp_216" + input: "^AssignVariableOp_217" + input: "^AssignVariableOp_218" + input: "^AssignVariableOp_219" + input: "^AssignVariableOp_220" + input: "^AssignVariableOp_221" + input: "^AssignVariableOp_222" + input: "^AssignVariableOp_223" + input: "^AssignVariableOp_224" + input: "^AssignVariableOp_225" + input: "^AssignVariableOp_226" + input: "^AssignVariableOp_227" + input: "^AssignVariableOp_228" + input: "^AssignVariableOp_229" + input: "^AssignVariableOp_230" + input: "^AssignVariableOp_231" + input: "^AssignVariableOp_232" + input: "^AssignVariableOp_233" + input: "^AssignVariableOp_234" + input: "^AssignVariableOp_235" + input: "^AssignVariableOp_236" + input: "^AssignVariableOp_237" + input: 
"^AssignVariableOp_238" + input: "^AssignVariableOp_239" + input: "^AssignVariableOp_240" + input: "^AssignVariableOp_241" + input: "^AssignVariableOp_242" + input: "^AssignVariableOp_243" + input: "^AssignVariableOp_244" + input: "^AssignVariableOp_245" + input: "^AssignVariableOp_246" + input: "^AssignVariableOp_247" + input: "^AssignVariableOp_248" + input: "^AssignVariableOp_249" + input: "^AssignVariableOp_250" + input: "^AssignVariableOp_251" + input: "^AssignVariableOp_252" + input: "^AssignVariableOp_253" + input: "^AssignVariableOp_254" + input: "^AssignVariableOp_255" + input: "^AssignVariableOp_256" + input: "^AssignVariableOp_257" + input: "^AssignVariableOp_258" + input: "^AssignVariableOp_259" + input: "^AssignVariableOp_260" + input: "^AssignVariableOp_261" + input: "^AssignVariableOp_262" + input: "^AssignVariableOp_263" + input: "^AssignVariableOp_264" + input: "^AssignVariableOp_265" + input: "^AssignVariableOp_266" + input: "^AssignVariableOp_267" + input: "^AssignVariableOp_268" + input: "^AssignVariableOp_269" + input: "^AssignVariableOp_270" + input: "^AssignVariableOp_271" + input: "^AssignVariableOp_272" + input: "^AssignVariableOp_273" + input: "^AssignVariableOp_274" + input: "^AssignVariableOp_275" + input: "^AssignVariableOp_276" + input: "^AssignVariableOp_277" + input: "^AssignVariableOp_278" + input: "^AssignVariableOp_279" + input: "^AssignVariableOp_280" + input: "^AssignVariableOp_281" + input: "^AssignVariableOp_282" + input: "^AssignVariableOp_283" + input: "^AssignVariableOp_284" + input: "^AssignVariableOp_285" + input: "^AssignVariableOp_286" + input: "^AssignVariableOp_287" + input: "^AssignVariableOp_288" + input: "^AssignVariableOp_289" + input: "^AssignVariableOp_290" + input: "^AssignVariableOp_291" + input: "^AssignVariableOp_292" + input: "^AssignVariableOp_293" + input: "^AssignVariableOp_294" + input: "^AssignVariableOp_295" + input: "^AssignVariableOp_296" + input: "^AssignVariableOp_297" + input: "^AssignVariableOp_298" + input: "^AssignVariableOp_299" + input: "^AssignVariableOp_300" + input: "^AssignVariableOp_301" + input: "^AssignVariableOp_302" + input: "^AssignVariableOp_303" + input: "^AssignVariableOp_304" + input: "^AssignVariableOp_305" + input: "^AssignVariableOp_306" + input: "^AssignVariableOp_307" + input: "^AssignVariableOp_308" + input: "^AssignVariableOp_309" + input: "^AssignVariableOp_310" + input: "^AssignVariableOp_311" + input: "^AssignVariableOp_312" + input: "^AssignVariableOp_313" + input: "^AssignVariableOp_314" + input: "^AssignVariableOp_315" + input: "^AssignVariableOp_316" + input: "^AssignVariableOp_317" + input: "^AssignVariableOp_318" + input: "^AssignVariableOp_319" + input: "^AssignVariableOp_320" + input: "^AssignVariableOp_321" + input: "^AssignVariableOp_322" + input: "^AssignVariableOp_323" + input: "^AssignVariableOp_324" + input: "^AssignVariableOp_325" + input: "^AssignVariableOp_326" + input: "^AssignVariableOp_327" + input: "^AssignVariableOp_328" + input: "^AssignVariableOp_329" + input: "^AssignVariableOp_330" + input: "^AssignVariableOp_331" + input: "^AssignVariableOp_332" + input: "^AssignVariableOp_333" + input: "^AssignVariableOp_334" + input: "^AssignVariableOp_335" + input: "^AssignVariableOp_336" + input: "^AssignVariableOp_337" + input: "^AssignVariableOp_338" + input: "^AssignVariableOp_339" + input: "^AssignVariableOp_340" + input: "^AssignVariableOp_341" + input: "^AssignVariableOp_342" + input: "^AssignVariableOp_343" + input: "^AssignVariableOp_344" + input: "^AssignVariableOp_345" 
+ input: "^AssignVariableOp_346" + input: "^AssignVariableOp_347" + input: "^AssignVariableOp_348" + input: "^AssignVariableOp_349" + input: "^AssignVariableOp_350" + input: "^AssignVariableOp_351" + input: "^AssignVariableOp_352" + input: "^AssignVariableOp_353" + input: "^AssignVariableOp_354" + input: "^AssignVariableOp_355" + input: "^AssignVariableOp_356" + input: "^AssignVariableOp_357" + input: "^AssignVariableOp_358" + input: "^AssignVariableOp_359" + input: "^AssignVariableOp_360" + input: "^AssignVariableOp_361" + input: "^AssignVariableOp_362" + input: "^AssignVariableOp_363" + input: "^AssignVariableOp_364" + input: "^AssignVariableOp_365" + input: "^AssignVariableOp_366" + input: "^AssignVariableOp_367" + input: "^AssignVariableOp_368" + input: "^AssignVariableOp_369" + input: "^AssignVariableOp_370" + input: "^AssignVariableOp_371" + input: "^AssignVariableOp_372" + input: "^AssignVariableOp_373" + input: "^AssignVariableOp_374" + input: "^AssignVariableOp_375" + input: "^AssignVariableOp_376" + input: "^AssignVariableOp_377" + input: "^AssignVariableOp_378" + input: "^AssignVariableOp_379" + input: "^AssignVariableOp_380" + input: "^AssignVariableOp_381" + input: "^AssignVariableOp_382" + input: "^AssignVariableOp_383" + input: "^AssignVariableOp_384" + input: "^AssignVariableOp_385" + input: "^AssignVariableOp_386" + input: "^AssignVariableOp_387" + input: "^AssignVariableOp_388" + input: "^AssignVariableOp_389" + input: "^AssignVariableOp_390" + input: "^AssignVariableOp_391" + input: "^AssignVariableOp_392" + input: "^AssignVariableOp_393" + input: "^AssignVariableOp_394" + input: "^AssignVariableOp_395" + input: "^AssignVariableOp_396" + input: "^AssignVariableOp_397" + input: "^AssignVariableOp_398" + input: "^AssignVariableOp_399" + input: "^AssignVariableOp_400" + input: "^AssignVariableOp_401" + input: "^AssignVariableOp_402" + input: "^AssignVariableOp_403" + input: "^AssignVariableOp_404" + input: "^AssignVariableOp_405" + input: "^AssignVariableOp_406" + input: "^AssignVariableOp_407" + input: "^AssignVariableOp_408" + input: "^AssignVariableOp_409" + input: "^AssignVariableOp_410" + input: "^AssignVariableOp_411" + input: "^AssignVariableOp_412" + input: "^AssignVariableOp_413" + input: "^AssignVariableOp_414" + input: "^AssignVariableOp_415" + input: "^AssignVariableOp_416" + input: "^AssignVariableOp_417" + input: "^AssignVariableOp_418" + input: "^AssignVariableOp_419" + input: "^AssignVariableOp_420" + input: "^AssignVariableOp_421" + input: "^AssignVariableOp_422" + input: "^AssignVariableOp_423" + input: "^AssignVariableOp_424" + input: "^AssignVariableOp_425" + input: "^AssignVariableOp_426" + input: "^AssignVariableOp_427" + input: "^AssignVariableOp_428" + input: "^AssignVariableOp_429" + input: "^AssignVariableOp_430" + input: "^AssignVariableOp_431" + input: "^AssignVariableOp_432" + input: "^AssignVariableOp_433" + input: "^AssignVariableOp_434" + input: "^AssignVariableOp_435" + input: "^AssignVariableOp_436" + input: "^AssignVariableOp_437" + input: "^AssignVariableOp_438" + input: "^AssignVariableOp_439" + input: "^AssignVariableOp_440" + input: "^AssignVariableOp_441" + input: "^AssignVariableOp_442" + input: "^AssignVariableOp_443" + input: "^AssignVariableOp_444" + input: "^AssignVariableOp_445" + input: "^AssignVariableOp_446" + input: "^AssignVariableOp_447" + input: "^AssignVariableOp_448" + input: "^AssignVariableOp_449" + input: "^AssignVariableOp_450" + input: "^AssignVariableOp_451" + input: "^AssignVariableOp_452" + input: 
"^AssignVariableOp_453" + input: "^AssignVariableOp_454" + input: "^AssignVariableOp_455" + input: "^AssignVariableOp_456" + input: "^AssignVariableOp_457" + input: "^AssignVariableOp_458" + input: "^AssignVariableOp_459" + input: "^AssignVariableOp_460" + input: "^AssignVariableOp_461" + input: "^AssignVariableOp_462" + input: "^AssignVariableOp_463" + input: "^AssignVariableOp_464" + input: "^AssignVariableOp_465" + input: "^AssignVariableOp_466" + input: "^AssignVariableOp_467" + input: "^AssignVariableOp_468" + input: "^AssignVariableOp_469" + input: "^AssignVariableOp_470" + input: "^AssignVariableOp_471" + input: "^AssignVariableOp_472" + input: "^AssignVariableOp_473" + input: "^AssignVariableOp_474" + input: "^AssignVariableOp_475" + input: "^AssignVariableOp_476" + input: "^AssignVariableOp_477" + input: "^AssignVariableOp_478" + input: "^AssignVariableOp_479" + input: "^AssignVariableOp_480" + input: "^AssignVariableOp_481" + input: "^AssignVariableOp_482" + input: "^AssignVariableOp_483" + input: "^AssignVariableOp_484" + input: "^AssignVariableOp_485" + input: "^AssignVariableOp_486" + input: "^AssignVariableOp_487" + input: "^AssignVariableOp_488" + input: "^AssignVariableOp_489" + input: "^AssignVariableOp_490" + input: "^AssignVariableOp_491" + input: "^AssignVariableOp_492" + input: "^AssignVariableOp_493" + input: "^AssignVariableOp_494" + input: "^AssignVariableOp_495" + input: "^AssignVariableOp_496" + input: "^AssignVariableOp_497" + input: "^AssignVariableOp_498" + input: "^AssignVariableOp_499" + input: "^AssignVariableOp_500" + input: "^AssignVariableOp_501" + input: "^AssignVariableOp_502" + input: "^AssignVariableOp_503" + input: "^AssignVariableOp_504" + input: "^AssignVariableOp_505" + input: "^AssignVariableOp_506" + input: "^AssignVariableOp_507" + input: "^AssignVariableOp_508" + input: "^AssignVariableOp_509" + input: "^AssignVariableOp_510" + input: "^AssignVariableOp_511" + input: "^AssignVariableOp_512" + input: "^AssignVariableOp_513" + input: "^AssignVariableOp_514" + input: "^AssignVariableOp_515" + input: "^AssignVariableOp_516" + input: "^AssignVariableOp_517" + input: "^AssignVariableOp_518" + input: "^AssignVariableOp_519" + input: "^AssignVariableOp_520" + input: "^AssignVariableOp_521" + input: "^AssignVariableOp_522" + input: "^AssignVariableOp_523" + input: "^AssignVariableOp_524" + input: "^AssignVariableOp_525" + input: "^AssignVariableOp_526" + input: "^AssignVariableOp_527" + input: "^AssignVariableOp_528" + input: "^AssignVariableOp_529" + input: "^AssignVariableOp_530" + input: "^AssignVariableOp_531" + input: "^AssignVariableOp_532" + input: "^AssignVariableOp_533" + input: "^AssignVariableOp_534" + input: "^AssignVariableOp_535" + input: "^AssignVariableOp_536" + input: "^AssignVariableOp_537" + input: "^AssignVariableOp_538" + input: "^AssignVariableOp_539" + input: "^AssignVariableOp_540" + input: "^AssignVariableOp_541" + input: "^AssignVariableOp_542" + input: "^AssignVariableOp_543" + input: "^AssignVariableOp_544" + input: "^AssignVariableOp_545" + input: "^AssignVariableOp_546" + input: "^AssignVariableOp_547" + input: "^AssignVariableOp_548" + input: "^AssignVariableOp_549" + input: "^AssignVariableOp_550" + input: "^AssignVariableOp_551" + input: "^AssignVariableOp_552" + input: "^AssignVariableOp_553" + input: "^AssignVariableOp_554" + input: "^AssignVariableOp_555" + input: "^AssignVariableOp_556" + input: "^AssignVariableOp_557" + input: "^AssignVariableOp_558" + input: "^AssignVariableOp_559" + input: "^AssignVariableOp_560" 
+ input: "^AssignVariableOp_561" + input: "^AssignVariableOp_562" + input: "^AssignVariableOp_563" + input: "^AssignVariableOp_564" + input: "^AssignVariableOp_565" + input: "^AssignVariableOp_566" + input: "^AssignVariableOp_567" + input: "^AssignVariableOp_568" + input: "^AssignVariableOp_569" + input: "^AssignVariableOp_570" + input: "^AssignVariableOp_571" + input: "^AssignVariableOp_572" + input: "^AssignVariableOp_573" + input: "^AssignVariableOp_574" + input: "^AssignVariableOp_575" + input: "^AssignVariableOp_576" + input: "^AssignVariableOp_577" + input: "^AssignVariableOp_578" + input: "^AssignVariableOp_579" + input: "^AssignVariableOp_580" + input: "^AssignVariableOp_581" + input: "^AssignVariableOp_582" + input: "^AssignVariableOp_583" + input: "^AssignVariableOp_584" + input: "^AssignVariableOp_585" + input: "^AssignVariableOp_586" + input: "^AssignVariableOp_587" + input: "^AssignVariableOp_588" + input: "^AssignVariableOp_589" + input: "^AssignVariableOp_590" + input: "^AssignVariableOp_591" + input: "^AssignVariableOp_592" + input: "^AssignVariableOp_593" + input: "^AssignVariableOp_594" + input: "^AssignVariableOp_595" + input: "^AssignVariableOp_596" + input: "^AssignVariableOp_597" + input: "^AssignVariableOp_598" + input: "^AssignVariableOp_599" + input: "^AssignVariableOp_600" + input: "^AssignVariableOp_601" + input: "^AssignVariableOp_602" + input: "^AssignVariableOp_603" + input: "^AssignVariableOp_604" + input: "^AssignVariableOp_605" + input: "^AssignVariableOp_606" + input: "^AssignVariableOp_607" + input: "^AssignVariableOp_608" + input: "^AssignVariableOp_609" + input: "^AssignVariableOp_610" + input: "^AssignVariableOp_611" + input: "^AssignVariableOp_612" + input: "^AssignVariableOp_613" + input: "^AssignVariableOp_614" + input: "^AssignVariableOp_615" + input: "^AssignVariableOp_616" + input: "^AssignVariableOp_617" + input: "^AssignVariableOp_618" + input: "^AssignVariableOp_619" + input: "^AssignVariableOp_620" + input: "^AssignVariableOp_621" + input: "^AssignVariableOp_622" + input: "^AssignVariableOp_623" + input: "^AssignVariableOp_624" + input: "^AssignVariableOp_625" + input: "^AssignVariableOp_626" + input: "^AssignVariableOp_627" + input: "^AssignVariableOp_628" + input: "^AssignVariableOp_629" + input: "^AssignVariableOp_630" + input: "^AssignVariableOp_631" + input: "^AssignVariableOp_632" + input: "^AssignVariableOp_633" + input: "^AssignVariableOp_634" + input: "^AssignVariableOp_635" + input: "^AssignVariableOp_636" + input: "^AssignVariableOp_637" + input: "^AssignVariableOp_638" + input: "^AssignVariableOp_639" + input: "^AssignVariableOp_640" + input: "^AssignVariableOp_641" + input: "^AssignVariableOp_642" + input: "^AssignVariableOp_643" + input: "^AssignVariableOp_644" + input: "^AssignVariableOp_645" + input: "^AssignVariableOp_646" + input: "^AssignVariableOp_647" + input: "^AssignVariableOp_648" + input: "^AssignVariableOp_649" + input: "^AssignVariableOp_650" + input: "^AssignVariableOp_651" + input: "^AssignVariableOp_652" + input: "^AssignVariableOp_653" + input: "^AssignVariableOp_654" + input: "^AssignVariableOp_655" + input: "^AssignVariableOp_656" + input: "^AssignVariableOp_657" + input: "^AssignVariableOp_658" + input: "^AssignVariableOp_659" + input: "^AssignVariableOp_660" + input: "^AssignVariableOp_661" + input: "^AssignVariableOp_662" + input: "^AssignVariableOp_663" + input: "^AssignVariableOp_664" + input: "^AssignVariableOp_665" + input: "^AssignVariableOp_666" + input: "^AssignVariableOp_667" + input: 
"^AssignVariableOp_668" + input: "^AssignVariableOp_669" + input: "^AssignVariableOp_670" + input: "^AssignVariableOp_671" + input: "^AssignVariableOp_672" + input: "^AssignVariableOp_673" + input: "^AssignVariableOp_674" + input: "^AssignVariableOp_675" + input: "^AssignVariableOp_676" + input: "^AssignVariableOp_677" + input: "^AssignVariableOp_678" + input: "^AssignVariableOp_679" + input: "^AssignVariableOp_680" + input: "^AssignVariableOp_681" + input: "^AssignVariableOp_682" + input: "^AssignVariableOp_683" + input: "^AssignVariableOp_684" + input: "^AssignVariableOp_685" + input: "^AssignVariableOp_686" + input: "^AssignVariableOp_687" + input: "^AssignVariableOp_688" + input: "^AssignVariableOp_689" + input: "^AssignVariableOp_690" + input: "^AssignVariableOp_691" + input: "^AssignVariableOp_692" + input: "^AssignVariableOp_693" + input: "^AssignVariableOp_694" + input: "^AssignVariableOp_695" + input: "^AssignVariableOp_696" + input: "^AssignVariableOp_697" + input: "^AssignVariableOp_698" + input: "^AssignVariableOp_699" + input: "^AssignVariableOp_700" + input: "^AssignVariableOp_701" + input: "^AssignVariableOp_702" + input: "^AssignVariableOp_703" + input: "^AssignVariableOp_704" + input: "^AssignVariableOp_705" + input: "^AssignVariableOp_706" + input: "^AssignVariableOp_707" + input: "^AssignVariableOp_708" + input: "^AssignVariableOp_709" + input: "^AssignVariableOp_710" + input: "^AssignVariableOp_711" + input: "^AssignVariableOp_712" + input: "^AssignVariableOp_713" + input: "^AssignVariableOp_714" + input: "^AssignVariableOp_715" + input: "^AssignVariableOp_716" + input: "^AssignVariableOp_717" + input: "^AssignVariableOp_718" + input: "^AssignVariableOp_719" + input: "^AssignVariableOp_720" + input: "^AssignVariableOp_721" + input: "^AssignVariableOp_722" + input: "^AssignVariableOp_723" + input: "^AssignVariableOp_724" + input: "^AssignVariableOp_725" + input: "^AssignVariableOp_726" + input: "^AssignVariableOp_727" + input: "^AssignVariableOp_728" + input: "^AssignVariableOp_729" + input: "^AssignVariableOp_730" + input: "^AssignVariableOp_731" + input: "^AssignVariableOp_732" + input: "^AssignVariableOp_733" + input: "^AssignVariableOp_734" + input: "^AssignVariableOp_735" + input: "^AssignVariableOp_736" + input: "^AssignVariableOp_737" + input: "^AssignVariableOp_738" + input: "^AssignVariableOp_739" + input: "^AssignVariableOp_740" + input: "^AssignVariableOp_741" + input: "^AssignVariableOp_742" + input: "^AssignVariableOp_743" + input: "^AssignVariableOp_744" + input: "^AssignVariableOp_745" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_872" + op: "ReadVariableOp" + input: "global_step" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } +} +node { + name: "add_674/y" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: 1 + } + } + } +} +node { + name: "add_674" + op: "AddV2" + input: "ReadVariableOp_872" + input: "add_674/y" + attr { + key: "T" + value { + type: DT_INT64 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "AssignVariableOp_746" + op: "AssignVariableOp" + input: "global_step" + input: "add_674" + attr { + key: "_has_manual_control_dependencies" + value 
{ + b: true + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "validate_shape" + value { + b: true + } + } +} +node { + name: "ReadVariableOp_873" + op: "ReadVariableOp" + input: "global_step" + input: "^AssignVariableOp_746" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } +} +node { + name: "group_deps_1" + op: "NoOp" + input: "^AssignVariableOp_746" + input: "^group_deps" +} +node { + name: "loss/tags" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "loss" + } + } + } +} +node { + name: "loss" + op: "ScalarSummary" + input: "loss/tags" + input: "truediv" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "init" + op: "NoOp" + input: "^AssignVariableOp" + input: "^AssignVariableOp_1" + input: "^AssignVariableOp_10" + input: "^AssignVariableOp_100" + input: "^AssignVariableOp_101" + input: "^AssignVariableOp_102" + input: "^AssignVariableOp_103" + input: "^AssignVariableOp_104" + input: "^AssignVariableOp_105" + input: "^AssignVariableOp_106" + input: "^AssignVariableOp_107" + input: "^AssignVariableOp_108" + input: "^AssignVariableOp_109" + input: "^AssignVariableOp_11" + input: "^AssignVariableOp_110" + input: "^AssignVariableOp_111" + input: "^AssignVariableOp_112" + input: "^AssignVariableOp_113" + input: "^AssignVariableOp_114" + input: "^AssignVariableOp_115" + input: "^AssignVariableOp_116" + input: "^AssignVariableOp_117" + input: "^AssignVariableOp_118" + input: "^AssignVariableOp_119" + input: "^AssignVariableOp_12" + input: "^AssignVariableOp_120" + input: "^AssignVariableOp_121" + input: "^AssignVariableOp_122" + input: "^AssignVariableOp_123" + input: "^AssignVariableOp_124" + input: "^AssignVariableOp_125" + input: "^AssignVariableOp_126" + input: "^AssignVariableOp_127" + input: "^AssignVariableOp_128" + input: "^AssignVariableOp_129" + input: "^AssignVariableOp_13" + input: "^AssignVariableOp_130" + input: "^AssignVariableOp_131" + input: "^AssignVariableOp_132" + input: "^AssignVariableOp_133" + input: "^AssignVariableOp_134" + input: "^AssignVariableOp_135" + input: "^AssignVariableOp_136" + input: "^AssignVariableOp_137" + input: "^AssignVariableOp_138" + input: "^AssignVariableOp_139" + input: "^AssignVariableOp_14" + input: "^AssignVariableOp_140" + input: "^AssignVariableOp_141" + input: "^AssignVariableOp_142" + input: "^AssignVariableOp_143" + input: "^AssignVariableOp_144" + input: "^AssignVariableOp_145" + input: "^AssignVariableOp_146" + input: "^AssignVariableOp_147" + input: "^AssignVariableOp_148" + input: "^AssignVariableOp_15" + input: "^AssignVariableOp_16" + input: "^AssignVariableOp_17" + input: "^AssignVariableOp_18" + input: "^AssignVariableOp_19" + input: "^AssignVariableOp_2" + input: "^AssignVariableOp_20" + input: "^AssignVariableOp_21" + input: "^AssignVariableOp_22" + input: "^AssignVariableOp_23" + input: "^AssignVariableOp_24" + input: "^AssignVariableOp_25" + input: "^AssignVariableOp_26" + input: "^AssignVariableOp_27" + input: "^AssignVariableOp_28" + input: "^AssignVariableOp_29" + input: "^AssignVariableOp_3" + input: "^AssignVariableOp_30" + input: "^AssignVariableOp_31" + input: "^AssignVariableOp_32" + input: 
"^AssignVariableOp_33" + input: "^AssignVariableOp_34" + input: "^AssignVariableOp_35" + input: "^AssignVariableOp_36" + input: "^AssignVariableOp_37" + input: "^AssignVariableOp_38" + input: "^AssignVariableOp_39" + input: "^AssignVariableOp_4" + input: "^AssignVariableOp_40" + input: "^AssignVariableOp_41" + input: "^AssignVariableOp_42" + input: "^AssignVariableOp_43" + input: "^AssignVariableOp_44" + input: "^AssignVariableOp_45" + input: "^AssignVariableOp_46" + input: "^AssignVariableOp_47" + input: "^AssignVariableOp_48" + input: "^AssignVariableOp_49" + input: "^AssignVariableOp_5" + input: "^AssignVariableOp_50" + input: "^AssignVariableOp_51" + input: "^AssignVariableOp_52" + input: "^AssignVariableOp_53" + input: "^AssignVariableOp_54" + input: "^AssignVariableOp_55" + input: "^AssignVariableOp_56" + input: "^AssignVariableOp_57" + input: "^AssignVariableOp_58" + input: "^AssignVariableOp_59" + input: "^AssignVariableOp_6" + input: "^AssignVariableOp_60" + input: "^AssignVariableOp_61" + input: "^AssignVariableOp_62" + input: "^AssignVariableOp_63" + input: "^AssignVariableOp_64" + input: "^AssignVariableOp_65" + input: "^AssignVariableOp_66" + input: "^AssignVariableOp_67" + input: "^AssignVariableOp_68" + input: "^AssignVariableOp_69" + input: "^AssignVariableOp_7" + input: "^AssignVariableOp_70" + input: "^AssignVariableOp_71" + input: "^AssignVariableOp_72" + input: "^AssignVariableOp_73" + input: "^AssignVariableOp_74" + input: "^AssignVariableOp_75" + input: "^AssignVariableOp_76" + input: "^AssignVariableOp_77" + input: "^AssignVariableOp_78" + input: "^AssignVariableOp_79" + input: "^AssignVariableOp_8" + input: "^AssignVariableOp_80" + input: "^AssignVariableOp_81" + input: "^AssignVariableOp_82" + input: "^AssignVariableOp_83" + input: "^AssignVariableOp_84" + input: "^AssignVariableOp_85" + input: "^AssignVariableOp_86" + input: "^AssignVariableOp_87" + input: "^AssignVariableOp_88" + input: "^AssignVariableOp_89" + input: "^AssignVariableOp_9" + input: "^AssignVariableOp_90" + input: "^AssignVariableOp_91" + input: "^AssignVariableOp_92" + input: "^AssignVariableOp_93" + input: "^AssignVariableOp_94" + input: "^AssignVariableOp_95" + input: "^AssignVariableOp_96" + input: "^AssignVariableOp_97" + input: "^AssignVariableOp_98" + input: "^AssignVariableOp_99" + input: "^bert/embeddings/layer_normalization/beta/Assign" + input: "^bert/embeddings/layer_normalization/beta/adam_m/Assign" + input: "^bert/embeddings/layer_normalization/beta/adam_v/Assign" + input: "^bert/embeddings/layer_normalization/gamma/Assign" + input: "^bert/embeddings/layer_normalization/gamma/adam_m/Assign" + input: "^bert/embeddings/layer_normalization/gamma/adam_v/Assign" + input: "^bert/embeddings/position_embeddings/adam_m/Assign" + input: "^bert/embeddings/position_embeddings/adam_v/Assign" + input: "^bert/embeddings/token_type_embeddings/adam_m/Assign" + input: "^bert/embeddings/token_type_embeddings/adam_v/Assign" + input: "^bert/embeddings/word_embeddings/adam_m/Assign" + input: "^bert/embeddings/word_embeddings/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/output/layer_normalization_1/beta/Assign" + input: 
"^bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/Assign" + input: "^bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_0/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_0/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_0/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_0/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_0/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_0/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_0/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_0/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_0/output/layer_normalization_2/beta/Assign" + input: "^bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m/Assign" + input: "^bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v/Assign" + input: "^bert/encoder/layer_0/output/layer_normalization_2/gamma/Assign" + input: "^bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m/Assign" + input: "^bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_1/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/output/layer_normalization_3/beta/Assign" + input: "^bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m/Assign" + input: "^bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/Assign" + input: "^bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m/Assign" + input: "^bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_1/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_1/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/self/query/bias/adam_m/Assign" + 
input: "^bert/encoder/layer_1/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_1/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_1/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_1/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_1/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_1/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_1/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_1/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_1/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_1/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_1/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_1/output/layer_normalization_4/beta/Assign" + input: "^bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m/Assign" + input: "^bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v/Assign" + input: "^bert/encoder/layer_1/output/layer_normalization_4/gamma/Assign" + input: "^bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m/Assign" + input: "^bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/output/layer_normalization_21/beta/Assign" + input: "^bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/Assign" + input: "^bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_10/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_10/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_10/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_10/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Assign" + input: 
"^bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_10/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_10/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_10/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_10/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_10/output/layer_normalization_22/beta/Assign" + input: "^bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m/Assign" + input: "^bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v/Assign" + input: "^bert/encoder/layer_10/output/layer_normalization_22/gamma/Assign" + input: "^bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m/Assign" + input: "^bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/output/layer_normalization_23/beta/Assign" + input: "^bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/Assign" + input: "^bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_11/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_11/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_11/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_11/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_11/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_11/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_11/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_11/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_11/output/layer_normalization_24/beta/Assign" + input: "^bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m/Assign" + input: "^bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v/Assign" + input: "^bert/encoder/layer_11/output/layer_normalization_24/gamma/Assign" + input: "^bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m/Assign" + input: 
"^bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/output/layer_normalization_5/beta/Assign" + input: "^bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/Assign" + input: "^bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_2/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_2/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_2/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_2/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_2/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_2/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_2/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_2/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_2/output/layer_normalization_6/beta/Assign" + input: "^bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m/Assign" + input: "^bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v/Assign" + input: "^bert/encoder/layer_2/output/layer_normalization_6/gamma/Assign" + input: "^bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m/Assign" + input: "^bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/output/layer_normalization_7/beta/Assign" + input: "^bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/Assign" + input: 
"^bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_3/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_3/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_3/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_3/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_3/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_3/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_3/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_3/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_3/output/layer_normalization_8/beta/Assign" + input: "^bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m/Assign" + input: "^bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v/Assign" + input: "^bert/encoder/layer_3/output/layer_normalization_8/gamma/Assign" + input: "^bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m/Assign" + input: "^bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v/Assign" + input: "^bert/encoder/layer_4/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_4/attention/output/layer_normalization_9/beta/Assign" + input: "^bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v/Assign" + input: "^bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/Assign" + input: "^bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v/Assign" + input: "^bert/encoder/layer_4/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_4/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_4/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_4/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/self/query/kernel/adam_v/Assign" + input: 
"^bert/encoder/layer_4/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_4/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_4/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_4/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_4/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_4/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_4/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_4/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_4/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_4/output/layer_normalization_10/beta/Assign" + input: "^bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m/Assign" + input: "^bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v/Assign" + input: "^bert/encoder/layer_4/output/layer_normalization_10/gamma/Assign" + input: "^bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m/Assign" + input: "^bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/output/layer_normalization_11/beta/Assign" + input: "^bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/Assign" + input: "^bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_5/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_5/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_5/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_5/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_5/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_5/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_5/output/dense/kernel/adam_m/Assign" + input: 
"^bert/encoder/layer_5/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_5/output/layer_normalization_12/beta/Assign" + input: "^bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m/Assign" + input: "^bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v/Assign" + input: "^bert/encoder/layer_5/output/layer_normalization_12/gamma/Assign" + input: "^bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m/Assign" + input: "^bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/output/layer_normalization_13/beta/Assign" + input: "^bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/Assign" + input: "^bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_6/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_6/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_6/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_6/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_6/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_6/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_6/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_6/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_6/output/layer_normalization_14/beta/Assign" + input: "^bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m/Assign" + input: "^bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v/Assign" + input: "^bert/encoder/layer_6/output/layer_normalization_14/gamma/Assign" + input: "^bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m/Assign" + input: "^bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_7/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Assign" + input: 
"^bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/output/layer_normalization_15/beta/Assign" + input: "^bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m/Assign" + input: "^bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/Assign" + input: "^bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m/Assign" + input: "^bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_7/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_7/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_7/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_7/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_7/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_7/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_7/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_7/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_7/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_7/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_7/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_7/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_7/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_7/output/layer_normalization_16/beta/Assign" + input: "^bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m/Assign" + input: "^bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v/Assign" + input: "^bert/encoder/layer_7/output/layer_normalization_16/gamma/Assign" + input: "^bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m/Assign" + input: "^bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v/Assign" + input: "^bert/encoder/layer_8/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_8/attention/output/layer_normalization_17/beta/Assign" + input: "^bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v/Assign" + input: "^bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/Assign" + input: "^bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v/Assign" + input: "^bert/encoder/layer_8/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/self/key/bias/adam_v/Assign" + input: 
"^bert/encoder/layer_8/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_8/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_8/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_8/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_8/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_8/attention/self/value/kernel/adam_v/Assign" + input: "^bert/encoder/layer_8/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_8/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_8/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_8/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_8/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_8/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_8/output/layer_normalization_18/beta/Assign" + input: "^bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m/Assign" + input: "^bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v/Assign" + input: "^bert/encoder/layer_8/output/layer_normalization_18/gamma/Assign" + input: "^bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m/Assign" + input: "^bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/output/layer_normalization_19/beta/Assign" + input: "^bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/Assign" + input: "^bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/self/key/bias/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/self/key/bias/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/self/key/kernel/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/self/key/kernel/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/self/query/bias/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/self/query/bias/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/self/query/kernel/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/self/query/kernel/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/self/value/bias/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/self/value/bias/adam_v/Assign" + input: "^bert/encoder/layer_9/attention/self/value/kernel/adam_m/Assign" + input: "^bert/encoder/layer_9/attention/self/value/kernel/adam_v/Assign" + input: 
"^bert/encoder/layer_9/intermediate/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_9/intermediate/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_9/output/dense/bias/adam_m/Assign" + input: "^bert/encoder/layer_9/output/dense/bias/adam_v/Assign" + input: "^bert/encoder/layer_9/output/dense/kernel/adam_m/Assign" + input: "^bert/encoder/layer_9/output/dense/kernel/adam_v/Assign" + input: "^bert/encoder/layer_9/output/layer_normalization_20/beta/Assign" + input: "^bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m/Assign" + input: "^bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v/Assign" + input: "^bert/encoder/layer_9/output/layer_normalization_20/gamma/Assign" + input: "^bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m/Assign" + input: "^bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v/Assign" + input: "^cls/squad/output_bias/Assign" + input: "^cls/squad/output_bias/adam_m/Assign" + input: "^cls/squad/output_bias/adam_v/Assign" + input: "^cls/squad/output_weights/Assign" + input: "^cls/squad/output_weights/adam_m/Assign" + input: "^cls/squad/output_weights/adam_v/Assign" + input: "^global_step/Assign" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } +} +node { + name: "init_1" + op: "NoOp" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } +} +node { + name: "group_deps_2" + op: "NoOp" + input: "^init" + input: "^init_1" +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "global_step" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_1" + op: "VarIsInitializedOp" + input: "bert/embeddings/word_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_2" + op: "VarIsInitializedOp" + input: "bert/embeddings/token_type_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_3" + op: "VarIsInitializedOp" + input: "bert/embeddings/position_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_4" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_5" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_6" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_7" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_8" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_0/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_9" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_10" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_11" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_12" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_13" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_14" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_15" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_16" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_17" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_18" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_19" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_20" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_21" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_22" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables/VarIsInitializedOp_23" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_24" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_25" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_26" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_27" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_28" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_29" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_30" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_31" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_32" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_33" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_34" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_35" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_36" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_37" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + attr { + key: "_output_shapes" + value { + list { 
+ shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_38" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_39" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_40" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_41" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_42" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_43" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_44" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_45" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_46" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_47" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_48" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_49" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_50" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_51" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_52" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + attr { + 
key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_53" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_54" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_55" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_56" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_57" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_58" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_59" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_60" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_61" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_62" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_63" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_64" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_65" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_66" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_67" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_3/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_68" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_69" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_70" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_71" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_72" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_73" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_74" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_75" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_76" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_77" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_78" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_79" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_80" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_81" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables/VarIsInitializedOp_82" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_83" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_84" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_85" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_86" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_87" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_88" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_89" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_90" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_91" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_92" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_93" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_94" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_95" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_96" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + 
list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_97" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_98" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_99" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_100" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_101" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_102" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_103" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_104" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_105" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_106" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_107" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_108" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_109" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_110" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_111" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_112" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_113" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_114" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_115" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_116" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_117" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_118" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_119" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_120" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_121" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_122" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_123" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_124" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_125" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables/VarIsInitializedOp_126" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_127" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_128" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_129" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_130" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_131" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_132" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_133" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_134" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_135" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_136" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_137" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_138" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_139" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_140" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + attr { + key: "_output_shapes" 
+ value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_141" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_142" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_143" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_144" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_145" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_146" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_147" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_148" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_149" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_150" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_151" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_152" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_153" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_154" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_155" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_9/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_156" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_157" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_158" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_159" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_160" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_161" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_162" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_163" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_164" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_165" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_166" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_167" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_168" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_169" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables/VarIsInitializedOp_170" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_171" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_172" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_173" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_174" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_175" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_176" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_177" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_178" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_179" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_180" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_181" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_182" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_183" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_184" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + attr { + 
key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_185" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_186" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_187" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_188" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_189" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_190" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_191" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_192" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_193" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_194" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_195" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_196" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_197" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_198" + op: "VarIsInitializedOp" + input: "bert/pooler/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_199" + op: "VarIsInitializedOp" + input: 
"bert/pooler/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_200" + op: "VarIsInitializedOp" + input: "cls/squad/output_weights" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_201" + op: "VarIsInitializedOp" + input: "cls/squad/output_bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_202" + op: "VarIsInitializedOp" + input: "bert/embeddings/word_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_203" + op: "VarIsInitializedOp" + input: "bert/embeddings/word_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_204" + op: "VarIsInitializedOp" + input: "bert/embeddings/token_type_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_205" + op: "VarIsInitializedOp" + input: "bert/embeddings/token_type_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_206" + op: "VarIsInitializedOp" + input: "bert/embeddings/position_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_207" + op: "VarIsInitializedOp" + input: "bert/embeddings/position_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_208" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_209" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_210" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_211" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_212" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_213" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_214" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + 
attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_215" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_216" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_217" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_218" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_219" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_220" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_221" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_222" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_223" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_224" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_225" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_226" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_227" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_228" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + 
name: "report_uninitialized_variables/VarIsInitializedOp_229" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_230" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_231" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_232" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_233" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_234" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_235" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_236" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_237" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_238" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_239" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_240" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_241" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_242" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_243" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_244" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_245" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_246" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_247" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_248" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_249" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_250" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_251" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_252" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_253" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_254" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_255" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_256" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_257" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_258" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_259" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_260" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_261" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_262" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_263" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_264" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_265" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_266" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_267" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_268" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_269" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_270" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_271" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + 
name: "report_uninitialized_variables/VarIsInitializedOp_272" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_273" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_274" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_275" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_276" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_277" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_278" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_279" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_280" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_281" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_282" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_283" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_284" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_285" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_286" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_287" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_288" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_289" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_290" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_291" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_292" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_293" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_294" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_295" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_296" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_297" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_298" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_299" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_300" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_2/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_301" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_302" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_303" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_304" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_305" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_306" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_307" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_308" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_309" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_310" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_311" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_312" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_313" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_314" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape 
{ + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_315" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_316" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_317" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_318" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_319" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_320" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_321" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_322" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_323" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_324" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_325" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_326" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_327" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_328" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables/VarIsInitializedOp_329" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_330" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_331" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_332" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_333" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_334" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_335" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_336" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_337" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_338" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_339" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_340" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_341" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_342" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_343" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_4/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_344" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_345" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_346" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_347" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_348" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_349" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_350" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_351" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_352" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_353" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_354" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_355" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_356" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_357" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_358" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_359" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_360" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_361" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_362" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_363" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_364" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_365" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_366" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_367" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_368" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_369" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_370" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_371" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables/VarIsInitializedOp_372" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_373" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_374" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_375" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_376" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_377" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_378" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_379" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_380" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_381" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_382" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_383" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_384" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_385" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_386" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_5/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_387" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_388" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_389" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_390" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_391" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_392" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_393" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_394" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_395" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_396" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_397" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_398" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_399" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_400" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + attr { 
+ key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_401" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_402" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_403" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_404" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_405" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_406" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_407" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_408" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_409" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_410" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_411" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_412" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_413" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_414" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables/VarIsInitializedOp_415" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_416" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_417" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_418" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_419" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_420" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_421" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_422" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_423" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_424" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_425" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_426" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_427" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_428" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables/VarIsInitializedOp_429" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_430" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_431" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_432" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_433" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_434" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_435" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_436" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_437" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_438" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_439" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_440" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_441" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_442" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_443" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_7/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_444" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_445" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_446" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_447" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_448" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_449" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_450" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_451" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_452" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_453" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_454" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_455" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_456" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_457" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_458" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_459" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_460" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_461" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_462" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_463" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_464" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_465" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_466" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_467" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_468" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_469" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_470" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_471" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + 
shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_472" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_473" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_474" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_475" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_476" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_477" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_478" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_479" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_480" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_481" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_482" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_483" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_484" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_485" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables/VarIsInitializedOp_486" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_487" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_488" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_489" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_490" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_491" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_492" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_493" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_494" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_495" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_496" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_497" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_498" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_499" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_500" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_501" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_502" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_503" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_504" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_505" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_506" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_507" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_508" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_509" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_510" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_511" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_512" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_513" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_514" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_515" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_516" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_517" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_518" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_519" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_520" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_521" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_522" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_523" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_524" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_525" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_526" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_527" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_528" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} 
+node { + name: "report_uninitialized_variables/VarIsInitializedOp_529" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_530" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_531" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_532" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_533" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_534" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_535" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_536" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_537" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_538" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_539" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_540" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_541" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_542" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_543" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_544" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_545" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_546" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_547" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_548" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_549" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_550" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_551" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_552" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_553" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_554" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_555" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_556" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_557" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_10/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_558" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_559" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_560" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_561" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_562" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_563" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_564" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_565" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_566" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_567" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_568" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_569" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_570" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_571" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_572" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_573" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_574" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_575" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_576" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_577" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_578" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_579" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_580" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_581" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_582" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_583" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_584" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_585" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { 
+ shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_586" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_587" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_588" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_589" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_590" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_591" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_592" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_593" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_594" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_595" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_596" + op: "VarIsInitializedOp" + input: "cls/squad/output_weights/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_597" + op: "VarIsInitializedOp" + input: "cls/squad/output_weights/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_598" + op: "VarIsInitializedOp" + input: "cls/squad/output_bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/VarIsInitializedOp_599" + op: "VarIsInitializedOp" + input: "cls/squad/output_bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables/stack" + op: "Pack" + input: "report_uninitialized_variables/VarIsInitializedOp" + input: 
"report_uninitialized_variables/VarIsInitializedOp_1" + input: "report_uninitialized_variables/VarIsInitializedOp_2" + input: "report_uninitialized_variables/VarIsInitializedOp_3" + input: "report_uninitialized_variables/VarIsInitializedOp_4" + input: "report_uninitialized_variables/VarIsInitializedOp_5" + input: "report_uninitialized_variables/VarIsInitializedOp_6" + input: "report_uninitialized_variables/VarIsInitializedOp_7" + input: "report_uninitialized_variables/VarIsInitializedOp_8" + input: "report_uninitialized_variables/VarIsInitializedOp_9" + input: "report_uninitialized_variables/VarIsInitializedOp_10" + input: "report_uninitialized_variables/VarIsInitializedOp_11" + input: "report_uninitialized_variables/VarIsInitializedOp_12" + input: "report_uninitialized_variables/VarIsInitializedOp_13" + input: "report_uninitialized_variables/VarIsInitializedOp_14" + input: "report_uninitialized_variables/VarIsInitializedOp_15" + input: "report_uninitialized_variables/VarIsInitializedOp_16" + input: "report_uninitialized_variables/VarIsInitializedOp_17" + input: "report_uninitialized_variables/VarIsInitializedOp_18" + input: "report_uninitialized_variables/VarIsInitializedOp_19" + input: "report_uninitialized_variables/VarIsInitializedOp_20" + input: "report_uninitialized_variables/VarIsInitializedOp_21" + input: "report_uninitialized_variables/VarIsInitializedOp_22" + input: "report_uninitialized_variables/VarIsInitializedOp_23" + input: "report_uninitialized_variables/VarIsInitializedOp_24" + input: "report_uninitialized_variables/VarIsInitializedOp_25" + input: "report_uninitialized_variables/VarIsInitializedOp_26" + input: "report_uninitialized_variables/VarIsInitializedOp_27" + input: "report_uninitialized_variables/VarIsInitializedOp_28" + input: "report_uninitialized_variables/VarIsInitializedOp_29" + input: "report_uninitialized_variables/VarIsInitializedOp_30" + input: "report_uninitialized_variables/VarIsInitializedOp_31" + input: "report_uninitialized_variables/VarIsInitializedOp_32" + input: "report_uninitialized_variables/VarIsInitializedOp_33" + input: "report_uninitialized_variables/VarIsInitializedOp_34" + input: "report_uninitialized_variables/VarIsInitializedOp_35" + input: "report_uninitialized_variables/VarIsInitializedOp_36" + input: "report_uninitialized_variables/VarIsInitializedOp_37" + input: "report_uninitialized_variables/VarIsInitializedOp_38" + input: "report_uninitialized_variables/VarIsInitializedOp_39" + input: "report_uninitialized_variables/VarIsInitializedOp_40" + input: "report_uninitialized_variables/VarIsInitializedOp_41" + input: "report_uninitialized_variables/VarIsInitializedOp_42" + input: "report_uninitialized_variables/VarIsInitializedOp_43" + input: "report_uninitialized_variables/VarIsInitializedOp_44" + input: "report_uninitialized_variables/VarIsInitializedOp_45" + input: "report_uninitialized_variables/VarIsInitializedOp_46" + input: "report_uninitialized_variables/VarIsInitializedOp_47" + input: "report_uninitialized_variables/VarIsInitializedOp_48" + input: "report_uninitialized_variables/VarIsInitializedOp_49" + input: "report_uninitialized_variables/VarIsInitializedOp_50" + input: "report_uninitialized_variables/VarIsInitializedOp_51" + input: "report_uninitialized_variables/VarIsInitializedOp_52" + input: "report_uninitialized_variables/VarIsInitializedOp_53" + input: "report_uninitialized_variables/VarIsInitializedOp_54" + input: "report_uninitialized_variables/VarIsInitializedOp_55" + input: 
"report_uninitialized_variables/VarIsInitializedOp_56" + input: "report_uninitialized_variables/VarIsInitializedOp_57" + input: "report_uninitialized_variables/VarIsInitializedOp_58" + input: "report_uninitialized_variables/VarIsInitializedOp_59" + input: "report_uninitialized_variables/VarIsInitializedOp_60" + input: "report_uninitialized_variables/VarIsInitializedOp_61" + input: "report_uninitialized_variables/VarIsInitializedOp_62" + input: "report_uninitialized_variables/VarIsInitializedOp_63" + input: "report_uninitialized_variables/VarIsInitializedOp_64" + input: "report_uninitialized_variables/VarIsInitializedOp_65" + input: "report_uninitialized_variables/VarIsInitializedOp_66" + input: "report_uninitialized_variables/VarIsInitializedOp_67" + input: "report_uninitialized_variables/VarIsInitializedOp_68" + input: "report_uninitialized_variables/VarIsInitializedOp_69" + input: "report_uninitialized_variables/VarIsInitializedOp_70" + input: "report_uninitialized_variables/VarIsInitializedOp_71" + input: "report_uninitialized_variables/VarIsInitializedOp_72" + input: "report_uninitialized_variables/VarIsInitializedOp_73" + input: "report_uninitialized_variables/VarIsInitializedOp_74" + input: "report_uninitialized_variables/VarIsInitializedOp_75" + input: "report_uninitialized_variables/VarIsInitializedOp_76" + input: "report_uninitialized_variables/VarIsInitializedOp_77" + input: "report_uninitialized_variables/VarIsInitializedOp_78" + input: "report_uninitialized_variables/VarIsInitializedOp_79" + input: "report_uninitialized_variables/VarIsInitializedOp_80" + input: "report_uninitialized_variables/VarIsInitializedOp_81" + input: "report_uninitialized_variables/VarIsInitializedOp_82" + input: "report_uninitialized_variables/VarIsInitializedOp_83" + input: "report_uninitialized_variables/VarIsInitializedOp_84" + input: "report_uninitialized_variables/VarIsInitializedOp_85" + input: "report_uninitialized_variables/VarIsInitializedOp_86" + input: "report_uninitialized_variables/VarIsInitializedOp_87" + input: "report_uninitialized_variables/VarIsInitializedOp_88" + input: "report_uninitialized_variables/VarIsInitializedOp_89" + input: "report_uninitialized_variables/VarIsInitializedOp_90" + input: "report_uninitialized_variables/VarIsInitializedOp_91" + input: "report_uninitialized_variables/VarIsInitializedOp_92" + input: "report_uninitialized_variables/VarIsInitializedOp_93" + input: "report_uninitialized_variables/VarIsInitializedOp_94" + input: "report_uninitialized_variables/VarIsInitializedOp_95" + input: "report_uninitialized_variables/VarIsInitializedOp_96" + input: "report_uninitialized_variables/VarIsInitializedOp_97" + input: "report_uninitialized_variables/VarIsInitializedOp_98" + input: "report_uninitialized_variables/VarIsInitializedOp_99" + input: "report_uninitialized_variables/VarIsInitializedOp_100" + input: "report_uninitialized_variables/VarIsInitializedOp_101" + input: "report_uninitialized_variables/VarIsInitializedOp_102" + input: "report_uninitialized_variables/VarIsInitializedOp_103" + input: "report_uninitialized_variables/VarIsInitializedOp_104" + input: "report_uninitialized_variables/VarIsInitializedOp_105" + input: "report_uninitialized_variables/VarIsInitializedOp_106" + input: "report_uninitialized_variables/VarIsInitializedOp_107" + input: "report_uninitialized_variables/VarIsInitializedOp_108" + input: "report_uninitialized_variables/VarIsInitializedOp_109" + input: "report_uninitialized_variables/VarIsInitializedOp_110" + input: 
"report_uninitialized_variables/VarIsInitializedOp_111" + input: "report_uninitialized_variables/VarIsInitializedOp_112" + input: "report_uninitialized_variables/VarIsInitializedOp_113" + input: "report_uninitialized_variables/VarIsInitializedOp_114" + input: "report_uninitialized_variables/VarIsInitializedOp_115" + input: "report_uninitialized_variables/VarIsInitializedOp_116" + input: "report_uninitialized_variables/VarIsInitializedOp_117" + input: "report_uninitialized_variables/VarIsInitializedOp_118" + input: "report_uninitialized_variables/VarIsInitializedOp_119" + input: "report_uninitialized_variables/VarIsInitializedOp_120" + input: "report_uninitialized_variables/VarIsInitializedOp_121" + input: "report_uninitialized_variables/VarIsInitializedOp_122" + input: "report_uninitialized_variables/VarIsInitializedOp_123" + input: "report_uninitialized_variables/VarIsInitializedOp_124" + input: "report_uninitialized_variables/VarIsInitializedOp_125" + input: "report_uninitialized_variables/VarIsInitializedOp_126" + input: "report_uninitialized_variables/VarIsInitializedOp_127" + input: "report_uninitialized_variables/VarIsInitializedOp_128" + input: "report_uninitialized_variables/VarIsInitializedOp_129" + input: "report_uninitialized_variables/VarIsInitializedOp_130" + input: "report_uninitialized_variables/VarIsInitializedOp_131" + input: "report_uninitialized_variables/VarIsInitializedOp_132" + input: "report_uninitialized_variables/VarIsInitializedOp_133" + input: "report_uninitialized_variables/VarIsInitializedOp_134" + input: "report_uninitialized_variables/VarIsInitializedOp_135" + input: "report_uninitialized_variables/VarIsInitializedOp_136" + input: "report_uninitialized_variables/VarIsInitializedOp_137" + input: "report_uninitialized_variables/VarIsInitializedOp_138" + input: "report_uninitialized_variables/VarIsInitializedOp_139" + input: "report_uninitialized_variables/VarIsInitializedOp_140" + input: "report_uninitialized_variables/VarIsInitializedOp_141" + input: "report_uninitialized_variables/VarIsInitializedOp_142" + input: "report_uninitialized_variables/VarIsInitializedOp_143" + input: "report_uninitialized_variables/VarIsInitializedOp_144" + input: "report_uninitialized_variables/VarIsInitializedOp_145" + input: "report_uninitialized_variables/VarIsInitializedOp_146" + input: "report_uninitialized_variables/VarIsInitializedOp_147" + input: "report_uninitialized_variables/VarIsInitializedOp_148" + input: "report_uninitialized_variables/VarIsInitializedOp_149" + input: "report_uninitialized_variables/VarIsInitializedOp_150" + input: "report_uninitialized_variables/VarIsInitializedOp_151" + input: "report_uninitialized_variables/VarIsInitializedOp_152" + input: "report_uninitialized_variables/VarIsInitializedOp_153" + input: "report_uninitialized_variables/VarIsInitializedOp_154" + input: "report_uninitialized_variables/VarIsInitializedOp_155" + input: "report_uninitialized_variables/VarIsInitializedOp_156" + input: "report_uninitialized_variables/VarIsInitializedOp_157" + input: "report_uninitialized_variables/VarIsInitializedOp_158" + input: "report_uninitialized_variables/VarIsInitializedOp_159" + input: "report_uninitialized_variables/VarIsInitializedOp_160" + input: "report_uninitialized_variables/VarIsInitializedOp_161" + input: "report_uninitialized_variables/VarIsInitializedOp_162" + input: "report_uninitialized_variables/VarIsInitializedOp_163" + input: "report_uninitialized_variables/VarIsInitializedOp_164" + input: 
"report_uninitialized_variables/VarIsInitializedOp_165" + input: "report_uninitialized_variables/VarIsInitializedOp_166" + input: "report_uninitialized_variables/VarIsInitializedOp_167" + input: "report_uninitialized_variables/VarIsInitializedOp_168" + input: "report_uninitialized_variables/VarIsInitializedOp_169" + input: "report_uninitialized_variables/VarIsInitializedOp_170" + input: "report_uninitialized_variables/VarIsInitializedOp_171" + input: "report_uninitialized_variables/VarIsInitializedOp_172" + input: "report_uninitialized_variables/VarIsInitializedOp_173" + input: "report_uninitialized_variables/VarIsInitializedOp_174" + input: "report_uninitialized_variables/VarIsInitializedOp_175" + input: "report_uninitialized_variables/VarIsInitializedOp_176" + input: "report_uninitialized_variables/VarIsInitializedOp_177" + input: "report_uninitialized_variables/VarIsInitializedOp_178" + input: "report_uninitialized_variables/VarIsInitializedOp_179" + input: "report_uninitialized_variables/VarIsInitializedOp_180" + input: "report_uninitialized_variables/VarIsInitializedOp_181" + input: "report_uninitialized_variables/VarIsInitializedOp_182" + input: "report_uninitialized_variables/VarIsInitializedOp_183" + input: "report_uninitialized_variables/VarIsInitializedOp_184" + input: "report_uninitialized_variables/VarIsInitializedOp_185" + input: "report_uninitialized_variables/VarIsInitializedOp_186" + input: "report_uninitialized_variables/VarIsInitializedOp_187" + input: "report_uninitialized_variables/VarIsInitializedOp_188" + input: "report_uninitialized_variables/VarIsInitializedOp_189" + input: "report_uninitialized_variables/VarIsInitializedOp_190" + input: "report_uninitialized_variables/VarIsInitializedOp_191" + input: "report_uninitialized_variables/VarIsInitializedOp_192" + input: "report_uninitialized_variables/VarIsInitializedOp_193" + input: "report_uninitialized_variables/VarIsInitializedOp_194" + input: "report_uninitialized_variables/VarIsInitializedOp_195" + input: "report_uninitialized_variables/VarIsInitializedOp_196" + input: "report_uninitialized_variables/VarIsInitializedOp_197" + input: "report_uninitialized_variables/VarIsInitializedOp_198" + input: "report_uninitialized_variables/VarIsInitializedOp_199" + input: "report_uninitialized_variables/VarIsInitializedOp_200" + input: "report_uninitialized_variables/VarIsInitializedOp_201" + input: "report_uninitialized_variables/VarIsInitializedOp_202" + input: "report_uninitialized_variables/VarIsInitializedOp_203" + input: "report_uninitialized_variables/VarIsInitializedOp_204" + input: "report_uninitialized_variables/VarIsInitializedOp_205" + input: "report_uninitialized_variables/VarIsInitializedOp_206" + input: "report_uninitialized_variables/VarIsInitializedOp_207" + input: "report_uninitialized_variables/VarIsInitializedOp_208" + input: "report_uninitialized_variables/VarIsInitializedOp_209" + input: "report_uninitialized_variables/VarIsInitializedOp_210" + input: "report_uninitialized_variables/VarIsInitializedOp_211" + input: "report_uninitialized_variables/VarIsInitializedOp_212" + input: "report_uninitialized_variables/VarIsInitializedOp_213" + input: "report_uninitialized_variables/VarIsInitializedOp_214" + input: "report_uninitialized_variables/VarIsInitializedOp_215" + input: "report_uninitialized_variables/VarIsInitializedOp_216" + input: "report_uninitialized_variables/VarIsInitializedOp_217" + input: "report_uninitialized_variables/VarIsInitializedOp_218" + input: 
"report_uninitialized_variables/VarIsInitializedOp_219" + input: "report_uninitialized_variables/VarIsInitializedOp_220" + input: "report_uninitialized_variables/VarIsInitializedOp_221" + input: "report_uninitialized_variables/VarIsInitializedOp_222" + input: "report_uninitialized_variables/VarIsInitializedOp_223" + input: "report_uninitialized_variables/VarIsInitializedOp_224" + input: "report_uninitialized_variables/VarIsInitializedOp_225" + input: "report_uninitialized_variables/VarIsInitializedOp_226" + input: "report_uninitialized_variables/VarIsInitializedOp_227" + input: "report_uninitialized_variables/VarIsInitializedOp_228" + input: "report_uninitialized_variables/VarIsInitializedOp_229" + input: "report_uninitialized_variables/VarIsInitializedOp_230" + input: "report_uninitialized_variables/VarIsInitializedOp_231" + input: "report_uninitialized_variables/VarIsInitializedOp_232" + input: "report_uninitialized_variables/VarIsInitializedOp_233" + input: "report_uninitialized_variables/VarIsInitializedOp_234" + input: "report_uninitialized_variables/VarIsInitializedOp_235" + input: "report_uninitialized_variables/VarIsInitializedOp_236" + input: "report_uninitialized_variables/VarIsInitializedOp_237" + input: "report_uninitialized_variables/VarIsInitializedOp_238" + input: "report_uninitialized_variables/VarIsInitializedOp_239" + input: "report_uninitialized_variables/VarIsInitializedOp_240" + input: "report_uninitialized_variables/VarIsInitializedOp_241" + input: "report_uninitialized_variables/VarIsInitializedOp_242" + input: "report_uninitialized_variables/VarIsInitializedOp_243" + input: "report_uninitialized_variables/VarIsInitializedOp_244" + input: "report_uninitialized_variables/VarIsInitializedOp_245" + input: "report_uninitialized_variables/VarIsInitializedOp_246" + input: "report_uninitialized_variables/VarIsInitializedOp_247" + input: "report_uninitialized_variables/VarIsInitializedOp_248" + input: "report_uninitialized_variables/VarIsInitializedOp_249" + input: "report_uninitialized_variables/VarIsInitializedOp_250" + input: "report_uninitialized_variables/VarIsInitializedOp_251" + input: "report_uninitialized_variables/VarIsInitializedOp_252" + input: "report_uninitialized_variables/VarIsInitializedOp_253" + input: "report_uninitialized_variables/VarIsInitializedOp_254" + input: "report_uninitialized_variables/VarIsInitializedOp_255" + input: "report_uninitialized_variables/VarIsInitializedOp_256" + input: "report_uninitialized_variables/VarIsInitializedOp_257" + input: "report_uninitialized_variables/VarIsInitializedOp_258" + input: "report_uninitialized_variables/VarIsInitializedOp_259" + input: "report_uninitialized_variables/VarIsInitializedOp_260" + input: "report_uninitialized_variables/VarIsInitializedOp_261" + input: "report_uninitialized_variables/VarIsInitializedOp_262" + input: "report_uninitialized_variables/VarIsInitializedOp_263" + input: "report_uninitialized_variables/VarIsInitializedOp_264" + input: "report_uninitialized_variables/VarIsInitializedOp_265" + input: "report_uninitialized_variables/VarIsInitializedOp_266" + input: "report_uninitialized_variables/VarIsInitializedOp_267" + input: "report_uninitialized_variables/VarIsInitializedOp_268" + input: "report_uninitialized_variables/VarIsInitializedOp_269" + input: "report_uninitialized_variables/VarIsInitializedOp_270" + input: "report_uninitialized_variables/VarIsInitializedOp_271" + input: "report_uninitialized_variables/VarIsInitializedOp_272" + input: 
"report_uninitialized_variables/VarIsInitializedOp_273" + input: "report_uninitialized_variables/VarIsInitializedOp_274" + input: "report_uninitialized_variables/VarIsInitializedOp_275" + input: "report_uninitialized_variables/VarIsInitializedOp_276" + input: "report_uninitialized_variables/VarIsInitializedOp_277" + input: "report_uninitialized_variables/VarIsInitializedOp_278" + input: "report_uninitialized_variables/VarIsInitializedOp_279" + input: "report_uninitialized_variables/VarIsInitializedOp_280" + input: "report_uninitialized_variables/VarIsInitializedOp_281" + input: "report_uninitialized_variables/VarIsInitializedOp_282" + input: "report_uninitialized_variables/VarIsInitializedOp_283" + input: "report_uninitialized_variables/VarIsInitializedOp_284" + input: "report_uninitialized_variables/VarIsInitializedOp_285" + input: "report_uninitialized_variables/VarIsInitializedOp_286" + input: "report_uninitialized_variables/VarIsInitializedOp_287" + input: "report_uninitialized_variables/VarIsInitializedOp_288" + input: "report_uninitialized_variables/VarIsInitializedOp_289" + input: "report_uninitialized_variables/VarIsInitializedOp_290" + input: "report_uninitialized_variables/VarIsInitializedOp_291" + input: "report_uninitialized_variables/VarIsInitializedOp_292" + input: "report_uninitialized_variables/VarIsInitializedOp_293" + input: "report_uninitialized_variables/VarIsInitializedOp_294" + input: "report_uninitialized_variables/VarIsInitializedOp_295" + input: "report_uninitialized_variables/VarIsInitializedOp_296" + input: "report_uninitialized_variables/VarIsInitializedOp_297" + input: "report_uninitialized_variables/VarIsInitializedOp_298" + input: "report_uninitialized_variables/VarIsInitializedOp_299" + input: "report_uninitialized_variables/VarIsInitializedOp_300" + input: "report_uninitialized_variables/VarIsInitializedOp_301" + input: "report_uninitialized_variables/VarIsInitializedOp_302" + input: "report_uninitialized_variables/VarIsInitializedOp_303" + input: "report_uninitialized_variables/VarIsInitializedOp_304" + input: "report_uninitialized_variables/VarIsInitializedOp_305" + input: "report_uninitialized_variables/VarIsInitializedOp_306" + input: "report_uninitialized_variables/VarIsInitializedOp_307" + input: "report_uninitialized_variables/VarIsInitializedOp_308" + input: "report_uninitialized_variables/VarIsInitializedOp_309" + input: "report_uninitialized_variables/VarIsInitializedOp_310" + input: "report_uninitialized_variables/VarIsInitializedOp_311" + input: "report_uninitialized_variables/VarIsInitializedOp_312" + input: "report_uninitialized_variables/VarIsInitializedOp_313" + input: "report_uninitialized_variables/VarIsInitializedOp_314" + input: "report_uninitialized_variables/VarIsInitializedOp_315" + input: "report_uninitialized_variables/VarIsInitializedOp_316" + input: "report_uninitialized_variables/VarIsInitializedOp_317" + input: "report_uninitialized_variables/VarIsInitializedOp_318" + input: "report_uninitialized_variables/VarIsInitializedOp_319" + input: "report_uninitialized_variables/VarIsInitializedOp_320" + input: "report_uninitialized_variables/VarIsInitializedOp_321" + input: "report_uninitialized_variables/VarIsInitializedOp_322" + input: "report_uninitialized_variables/VarIsInitializedOp_323" + input: "report_uninitialized_variables/VarIsInitializedOp_324" + input: "report_uninitialized_variables/VarIsInitializedOp_325" + input: "report_uninitialized_variables/VarIsInitializedOp_326" + input: 
"report_uninitialized_variables/VarIsInitializedOp_327" + input: "report_uninitialized_variables/VarIsInitializedOp_328" + input: "report_uninitialized_variables/VarIsInitializedOp_329" + input: "report_uninitialized_variables/VarIsInitializedOp_330" + input: "report_uninitialized_variables/VarIsInitializedOp_331" + input: "report_uninitialized_variables/VarIsInitializedOp_332" + input: "report_uninitialized_variables/VarIsInitializedOp_333" + input: "report_uninitialized_variables/VarIsInitializedOp_334" + input: "report_uninitialized_variables/VarIsInitializedOp_335" + input: "report_uninitialized_variables/VarIsInitializedOp_336" + input: "report_uninitialized_variables/VarIsInitializedOp_337" + input: "report_uninitialized_variables/VarIsInitializedOp_338" + input: "report_uninitialized_variables/VarIsInitializedOp_339" + input: "report_uninitialized_variables/VarIsInitializedOp_340" + input: "report_uninitialized_variables/VarIsInitializedOp_341" + input: "report_uninitialized_variables/VarIsInitializedOp_342" + input: "report_uninitialized_variables/VarIsInitializedOp_343" + input: "report_uninitialized_variables/VarIsInitializedOp_344" + input: "report_uninitialized_variables/VarIsInitializedOp_345" + input: "report_uninitialized_variables/VarIsInitializedOp_346" + input: "report_uninitialized_variables/VarIsInitializedOp_347" + input: "report_uninitialized_variables/VarIsInitializedOp_348" + input: "report_uninitialized_variables/VarIsInitializedOp_349" + input: "report_uninitialized_variables/VarIsInitializedOp_350" + input: "report_uninitialized_variables/VarIsInitializedOp_351" + input: "report_uninitialized_variables/VarIsInitializedOp_352" + input: "report_uninitialized_variables/VarIsInitializedOp_353" + input: "report_uninitialized_variables/VarIsInitializedOp_354" + input: "report_uninitialized_variables/VarIsInitializedOp_355" + input: "report_uninitialized_variables/VarIsInitializedOp_356" + input: "report_uninitialized_variables/VarIsInitializedOp_357" + input: "report_uninitialized_variables/VarIsInitializedOp_358" + input: "report_uninitialized_variables/VarIsInitializedOp_359" + input: "report_uninitialized_variables/VarIsInitializedOp_360" + input: "report_uninitialized_variables/VarIsInitializedOp_361" + input: "report_uninitialized_variables/VarIsInitializedOp_362" + input: "report_uninitialized_variables/VarIsInitializedOp_363" + input: "report_uninitialized_variables/VarIsInitializedOp_364" + input: "report_uninitialized_variables/VarIsInitializedOp_365" + input: "report_uninitialized_variables/VarIsInitializedOp_366" + input: "report_uninitialized_variables/VarIsInitializedOp_367" + input: "report_uninitialized_variables/VarIsInitializedOp_368" + input: "report_uninitialized_variables/VarIsInitializedOp_369" + input: "report_uninitialized_variables/VarIsInitializedOp_370" + input: "report_uninitialized_variables/VarIsInitializedOp_371" + input: "report_uninitialized_variables/VarIsInitializedOp_372" + input: "report_uninitialized_variables/VarIsInitializedOp_373" + input: "report_uninitialized_variables/VarIsInitializedOp_374" + input: "report_uninitialized_variables/VarIsInitializedOp_375" + input: "report_uninitialized_variables/VarIsInitializedOp_376" + input: "report_uninitialized_variables/VarIsInitializedOp_377" + input: "report_uninitialized_variables/VarIsInitializedOp_378" + input: "report_uninitialized_variables/VarIsInitializedOp_379" + input: "report_uninitialized_variables/VarIsInitializedOp_380" + input: 
"report_uninitialized_variables/VarIsInitializedOp_381" + input: "report_uninitialized_variables/VarIsInitializedOp_382" + input: "report_uninitialized_variables/VarIsInitializedOp_383" + input: "report_uninitialized_variables/VarIsInitializedOp_384" + input: "report_uninitialized_variables/VarIsInitializedOp_385" + input: "report_uninitialized_variables/VarIsInitializedOp_386" + input: "report_uninitialized_variables/VarIsInitializedOp_387" + input: "report_uninitialized_variables/VarIsInitializedOp_388" + input: "report_uninitialized_variables/VarIsInitializedOp_389" + input: "report_uninitialized_variables/VarIsInitializedOp_390" + input: "report_uninitialized_variables/VarIsInitializedOp_391" + input: "report_uninitialized_variables/VarIsInitializedOp_392" + input: "report_uninitialized_variables/VarIsInitializedOp_393" + input: "report_uninitialized_variables/VarIsInitializedOp_394" + input: "report_uninitialized_variables/VarIsInitializedOp_395" + input: "report_uninitialized_variables/VarIsInitializedOp_396" + input: "report_uninitialized_variables/VarIsInitializedOp_397" + input: "report_uninitialized_variables/VarIsInitializedOp_398" + input: "report_uninitialized_variables/VarIsInitializedOp_399" + input: "report_uninitialized_variables/VarIsInitializedOp_400" + input: "report_uninitialized_variables/VarIsInitializedOp_401" + input: "report_uninitialized_variables/VarIsInitializedOp_402" + input: "report_uninitialized_variables/VarIsInitializedOp_403" + input: "report_uninitialized_variables/VarIsInitializedOp_404" + input: "report_uninitialized_variables/VarIsInitializedOp_405" + input: "report_uninitialized_variables/VarIsInitializedOp_406" + input: "report_uninitialized_variables/VarIsInitializedOp_407" + input: "report_uninitialized_variables/VarIsInitializedOp_408" + input: "report_uninitialized_variables/VarIsInitializedOp_409" + input: "report_uninitialized_variables/VarIsInitializedOp_410" + input: "report_uninitialized_variables/VarIsInitializedOp_411" + input: "report_uninitialized_variables/VarIsInitializedOp_412" + input: "report_uninitialized_variables/VarIsInitializedOp_413" + input: "report_uninitialized_variables/VarIsInitializedOp_414" + input: "report_uninitialized_variables/VarIsInitializedOp_415" + input: "report_uninitialized_variables/VarIsInitializedOp_416" + input: "report_uninitialized_variables/VarIsInitializedOp_417" + input: "report_uninitialized_variables/VarIsInitializedOp_418" + input: "report_uninitialized_variables/VarIsInitializedOp_419" + input: "report_uninitialized_variables/VarIsInitializedOp_420" + input: "report_uninitialized_variables/VarIsInitializedOp_421" + input: "report_uninitialized_variables/VarIsInitializedOp_422" + input: "report_uninitialized_variables/VarIsInitializedOp_423" + input: "report_uninitialized_variables/VarIsInitializedOp_424" + input: "report_uninitialized_variables/VarIsInitializedOp_425" + input: "report_uninitialized_variables/VarIsInitializedOp_426" + input: "report_uninitialized_variables/VarIsInitializedOp_427" + input: "report_uninitialized_variables/VarIsInitializedOp_428" + input: "report_uninitialized_variables/VarIsInitializedOp_429" + input: "report_uninitialized_variables/VarIsInitializedOp_430" + input: "report_uninitialized_variables/VarIsInitializedOp_431" + input: "report_uninitialized_variables/VarIsInitializedOp_432" + input: "report_uninitialized_variables/VarIsInitializedOp_433" + input: "report_uninitialized_variables/VarIsInitializedOp_434" + input: 
"report_uninitialized_variables/VarIsInitializedOp_435" + input: "report_uninitialized_variables/VarIsInitializedOp_436" + input: "report_uninitialized_variables/VarIsInitializedOp_437" + input: "report_uninitialized_variables/VarIsInitializedOp_438" + input: "report_uninitialized_variables/VarIsInitializedOp_439" + input: "report_uninitialized_variables/VarIsInitializedOp_440" + input: "report_uninitialized_variables/VarIsInitializedOp_441" + input: "report_uninitialized_variables/VarIsInitializedOp_442" + input: "report_uninitialized_variables/VarIsInitializedOp_443" + input: "report_uninitialized_variables/VarIsInitializedOp_444" + input: "report_uninitialized_variables/VarIsInitializedOp_445" + input: "report_uninitialized_variables/VarIsInitializedOp_446" + input: "report_uninitialized_variables/VarIsInitializedOp_447" + input: "report_uninitialized_variables/VarIsInitializedOp_448" + input: "report_uninitialized_variables/VarIsInitializedOp_449" + input: "report_uninitialized_variables/VarIsInitializedOp_450" + input: "report_uninitialized_variables/VarIsInitializedOp_451" + input: "report_uninitialized_variables/VarIsInitializedOp_452" + input: "report_uninitialized_variables/VarIsInitializedOp_453" + input: "report_uninitialized_variables/VarIsInitializedOp_454" + input: "report_uninitialized_variables/VarIsInitializedOp_455" + input: "report_uninitialized_variables/VarIsInitializedOp_456" + input: "report_uninitialized_variables/VarIsInitializedOp_457" + input: "report_uninitialized_variables/VarIsInitializedOp_458" + input: "report_uninitialized_variables/VarIsInitializedOp_459" + input: "report_uninitialized_variables/VarIsInitializedOp_460" + input: "report_uninitialized_variables/VarIsInitializedOp_461" + input: "report_uninitialized_variables/VarIsInitializedOp_462" + input: "report_uninitialized_variables/VarIsInitializedOp_463" + input: "report_uninitialized_variables/VarIsInitializedOp_464" + input: "report_uninitialized_variables/VarIsInitializedOp_465" + input: "report_uninitialized_variables/VarIsInitializedOp_466" + input: "report_uninitialized_variables/VarIsInitializedOp_467" + input: "report_uninitialized_variables/VarIsInitializedOp_468" + input: "report_uninitialized_variables/VarIsInitializedOp_469" + input: "report_uninitialized_variables/VarIsInitializedOp_470" + input: "report_uninitialized_variables/VarIsInitializedOp_471" + input: "report_uninitialized_variables/VarIsInitializedOp_472" + input: "report_uninitialized_variables/VarIsInitializedOp_473" + input: "report_uninitialized_variables/VarIsInitializedOp_474" + input: "report_uninitialized_variables/VarIsInitializedOp_475" + input: "report_uninitialized_variables/VarIsInitializedOp_476" + input: "report_uninitialized_variables/VarIsInitializedOp_477" + input: "report_uninitialized_variables/VarIsInitializedOp_478" + input: "report_uninitialized_variables/VarIsInitializedOp_479" + input: "report_uninitialized_variables/VarIsInitializedOp_480" + input: "report_uninitialized_variables/VarIsInitializedOp_481" + input: "report_uninitialized_variables/VarIsInitializedOp_482" + input: "report_uninitialized_variables/VarIsInitializedOp_483" + input: "report_uninitialized_variables/VarIsInitializedOp_484" + input: "report_uninitialized_variables/VarIsInitializedOp_485" + input: "report_uninitialized_variables/VarIsInitializedOp_486" + input: "report_uninitialized_variables/VarIsInitializedOp_487" + input: "report_uninitialized_variables/VarIsInitializedOp_488" + input: 
"report_uninitialized_variables/VarIsInitializedOp_489" + input: "report_uninitialized_variables/VarIsInitializedOp_490" + input: "report_uninitialized_variables/VarIsInitializedOp_491" + input: "report_uninitialized_variables/VarIsInitializedOp_492" + input: "report_uninitialized_variables/VarIsInitializedOp_493" + input: "report_uninitialized_variables/VarIsInitializedOp_494" + input: "report_uninitialized_variables/VarIsInitializedOp_495" + input: "report_uninitialized_variables/VarIsInitializedOp_496" + input: "report_uninitialized_variables/VarIsInitializedOp_497" + input: "report_uninitialized_variables/VarIsInitializedOp_498" + input: "report_uninitialized_variables/VarIsInitializedOp_499" + input: "report_uninitialized_variables/VarIsInitializedOp_500" + input: "report_uninitialized_variables/VarIsInitializedOp_501" + input: "report_uninitialized_variables/VarIsInitializedOp_502" + input: "report_uninitialized_variables/VarIsInitializedOp_503" + input: "report_uninitialized_variables/VarIsInitializedOp_504" + input: "report_uninitialized_variables/VarIsInitializedOp_505" + input: "report_uninitialized_variables/VarIsInitializedOp_506" + input: "report_uninitialized_variables/VarIsInitializedOp_507" + input: "report_uninitialized_variables/VarIsInitializedOp_508" + input: "report_uninitialized_variables/VarIsInitializedOp_509" + input: "report_uninitialized_variables/VarIsInitializedOp_510" + input: "report_uninitialized_variables/VarIsInitializedOp_511" + input: "report_uninitialized_variables/VarIsInitializedOp_512" + input: "report_uninitialized_variables/VarIsInitializedOp_513" + input: "report_uninitialized_variables/VarIsInitializedOp_514" + input: "report_uninitialized_variables/VarIsInitializedOp_515" + input: "report_uninitialized_variables/VarIsInitializedOp_516" + input: "report_uninitialized_variables/VarIsInitializedOp_517" + input: "report_uninitialized_variables/VarIsInitializedOp_518" + input: "report_uninitialized_variables/VarIsInitializedOp_519" + input: "report_uninitialized_variables/VarIsInitializedOp_520" + input: "report_uninitialized_variables/VarIsInitializedOp_521" + input: "report_uninitialized_variables/VarIsInitializedOp_522" + input: "report_uninitialized_variables/VarIsInitializedOp_523" + input: "report_uninitialized_variables/VarIsInitializedOp_524" + input: "report_uninitialized_variables/VarIsInitializedOp_525" + input: "report_uninitialized_variables/VarIsInitializedOp_526" + input: "report_uninitialized_variables/VarIsInitializedOp_527" + input: "report_uninitialized_variables/VarIsInitializedOp_528" + input: "report_uninitialized_variables/VarIsInitializedOp_529" + input: "report_uninitialized_variables/VarIsInitializedOp_530" + input: "report_uninitialized_variables/VarIsInitializedOp_531" + input: "report_uninitialized_variables/VarIsInitializedOp_532" + input: "report_uninitialized_variables/VarIsInitializedOp_533" + input: "report_uninitialized_variables/VarIsInitializedOp_534" + input: "report_uninitialized_variables/VarIsInitializedOp_535" + input: "report_uninitialized_variables/VarIsInitializedOp_536" + input: "report_uninitialized_variables/VarIsInitializedOp_537" + input: "report_uninitialized_variables/VarIsInitializedOp_538" + input: "report_uninitialized_variables/VarIsInitializedOp_539" + input: "report_uninitialized_variables/VarIsInitializedOp_540" + input: "report_uninitialized_variables/VarIsInitializedOp_541" + input: "report_uninitialized_variables/VarIsInitializedOp_542" + input: 
"report_uninitialized_variables/VarIsInitializedOp_543" + input: "report_uninitialized_variables/VarIsInitializedOp_544" + input: "report_uninitialized_variables/VarIsInitializedOp_545" + input: "report_uninitialized_variables/VarIsInitializedOp_546" + input: "report_uninitialized_variables/VarIsInitializedOp_547" + input: "report_uninitialized_variables/VarIsInitializedOp_548" + input: "report_uninitialized_variables/VarIsInitializedOp_549" + input: "report_uninitialized_variables/VarIsInitializedOp_550" + input: "report_uninitialized_variables/VarIsInitializedOp_551" + input: "report_uninitialized_variables/VarIsInitializedOp_552" + input: "report_uninitialized_variables/VarIsInitializedOp_553" + input: "report_uninitialized_variables/VarIsInitializedOp_554" + input: "report_uninitialized_variables/VarIsInitializedOp_555" + input: "report_uninitialized_variables/VarIsInitializedOp_556" + input: "report_uninitialized_variables/VarIsInitializedOp_557" + input: "report_uninitialized_variables/VarIsInitializedOp_558" + input: "report_uninitialized_variables/VarIsInitializedOp_559" + input: "report_uninitialized_variables/VarIsInitializedOp_560" + input: "report_uninitialized_variables/VarIsInitializedOp_561" + input: "report_uninitialized_variables/VarIsInitializedOp_562" + input: "report_uninitialized_variables/VarIsInitializedOp_563" + input: "report_uninitialized_variables/VarIsInitializedOp_564" + input: "report_uninitialized_variables/VarIsInitializedOp_565" + input: "report_uninitialized_variables/VarIsInitializedOp_566" + input: "report_uninitialized_variables/VarIsInitializedOp_567" + input: "report_uninitialized_variables/VarIsInitializedOp_568" + input: "report_uninitialized_variables/VarIsInitializedOp_569" + input: "report_uninitialized_variables/VarIsInitializedOp_570" + input: "report_uninitialized_variables/VarIsInitializedOp_571" + input: "report_uninitialized_variables/VarIsInitializedOp_572" + input: "report_uninitialized_variables/VarIsInitializedOp_573" + input: "report_uninitialized_variables/VarIsInitializedOp_574" + input: "report_uninitialized_variables/VarIsInitializedOp_575" + input: "report_uninitialized_variables/VarIsInitializedOp_576" + input: "report_uninitialized_variables/VarIsInitializedOp_577" + input: "report_uninitialized_variables/VarIsInitializedOp_578" + input: "report_uninitialized_variables/VarIsInitializedOp_579" + input: "report_uninitialized_variables/VarIsInitializedOp_580" + input: "report_uninitialized_variables/VarIsInitializedOp_581" + input: "report_uninitialized_variables/VarIsInitializedOp_582" + input: "report_uninitialized_variables/VarIsInitializedOp_583" + input: "report_uninitialized_variables/VarIsInitializedOp_584" + input: "report_uninitialized_variables/VarIsInitializedOp_585" + input: "report_uninitialized_variables/VarIsInitializedOp_586" + input: "report_uninitialized_variables/VarIsInitializedOp_587" + input: "report_uninitialized_variables/VarIsInitializedOp_588" + input: "report_uninitialized_variables/VarIsInitializedOp_589" + input: "report_uninitialized_variables/VarIsInitializedOp_590" + input: "report_uninitialized_variables/VarIsInitializedOp_591" + input: "report_uninitialized_variables/VarIsInitializedOp_592" + input: "report_uninitialized_variables/VarIsInitializedOp_593" + input: "report_uninitialized_variables/VarIsInitializedOp_594" + input: "report_uninitialized_variables/VarIsInitializedOp_595" + input: "report_uninitialized_variables/VarIsInitializedOp_596" + input: 
"report_uninitialized_variables/VarIsInitializedOp_597" + input: "report_uninitialized_variables/VarIsInitializedOp_598" + input: "report_uninitialized_variables/VarIsInitializedOp_599" + device: "/device:CPU:0" + attr { + key: "N" + value { + i: 600 + } + } + attr { + key: "T" + value { + type: DT_BOOL + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables/LogicalNot" + op: "LogicalNot" + input: "report_uninitialized_variables/stack" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables/Const" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 600 + } + } + string_val: "global_step" + string_val: "bert/embeddings/word_embeddings" + string_val: "bert/embeddings/token_type_embeddings" + string_val: "bert/embeddings/position_embeddings" + string_val: "bert/embeddings/layer_normalization/gamma" + string_val: "bert/embeddings/layer_normalization/beta" + string_val: "bert/encoder/layer_0/attention/self/query/kernel" + string_val: "bert/encoder/layer_0/attention/self/query/bias" + string_val: "bert/encoder/layer_0/attention/self/key/kernel" + string_val: "bert/encoder/layer_0/attention/self/key/bias" + string_val: "bert/encoder/layer_0/attention/self/value/kernel" + string_val: "bert/encoder/layer_0/attention/self/value/bias" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel" + string_val: "bert/encoder/layer_0/attention/output/dense/bias" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel" + string_val: "bert/encoder/layer_0/intermediate/dense/bias" + string_val: "bert/encoder/layer_0/output/dense/kernel" + string_val: "bert/encoder/layer_0/output/dense/bias" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta" + string_val: "bert/encoder/layer_1/attention/self/query/kernel" + string_val: "bert/encoder/layer_1/attention/self/query/bias" + string_val: "bert/encoder/layer_1/attention/self/key/kernel" + string_val: "bert/encoder/layer_1/attention/self/key/bias" + string_val: "bert/encoder/layer_1/attention/self/value/kernel" + string_val: "bert/encoder/layer_1/attention/self/value/bias" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel" + string_val: "bert/encoder/layer_1/attention/output/dense/bias" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel" + string_val: "bert/encoder/layer_1/intermediate/dense/bias" + string_val: "bert/encoder/layer_1/output/dense/kernel" + string_val: "bert/encoder/layer_1/output/dense/bias" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta" + string_val: 
"bert/encoder/layer_2/attention/self/query/kernel" + string_val: "bert/encoder/layer_2/attention/self/query/bias" + string_val: "bert/encoder/layer_2/attention/self/key/kernel" + string_val: "bert/encoder/layer_2/attention/self/key/bias" + string_val: "bert/encoder/layer_2/attention/self/value/kernel" + string_val: "bert/encoder/layer_2/attention/self/value/bias" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel" + string_val: "bert/encoder/layer_2/attention/output/dense/bias" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel" + string_val: "bert/encoder/layer_2/intermediate/dense/bias" + string_val: "bert/encoder/layer_2/output/dense/kernel" + string_val: "bert/encoder/layer_2/output/dense/bias" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta" + string_val: "bert/encoder/layer_3/attention/self/query/kernel" + string_val: "bert/encoder/layer_3/attention/self/query/bias" + string_val: "bert/encoder/layer_3/attention/self/key/kernel" + string_val: "bert/encoder/layer_3/attention/self/key/bias" + string_val: "bert/encoder/layer_3/attention/self/value/kernel" + string_val: "bert/encoder/layer_3/attention/self/value/bias" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel" + string_val: "bert/encoder/layer_3/attention/output/dense/bias" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel" + string_val: "bert/encoder/layer_3/intermediate/dense/bias" + string_val: "bert/encoder/layer_3/output/dense/kernel" + string_val: "bert/encoder/layer_3/output/dense/bias" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta" + string_val: "bert/encoder/layer_4/attention/self/query/kernel" + string_val: "bert/encoder/layer_4/attention/self/query/bias" + string_val: "bert/encoder/layer_4/attention/self/key/kernel" + string_val: "bert/encoder/layer_4/attention/self/key/bias" + string_val: "bert/encoder/layer_4/attention/self/value/kernel" + string_val: "bert/encoder/layer_4/attention/self/value/bias" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel" + string_val: "bert/encoder/layer_4/attention/output/dense/bias" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel" + string_val: "bert/encoder/layer_4/intermediate/dense/bias" + string_val: "bert/encoder/layer_4/output/dense/kernel" + string_val: "bert/encoder/layer_4/output/dense/bias" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta" + string_val: "bert/encoder/layer_5/attention/self/query/kernel" + string_val: "bert/encoder/layer_5/attention/self/query/bias" + string_val: "bert/encoder/layer_5/attention/self/key/kernel" + string_val: "bert/encoder/layer_5/attention/self/key/bias" + string_val: "bert/encoder/layer_5/attention/self/value/kernel" + string_val: "bert/encoder/layer_5/attention/self/value/bias" + string_val: 
"bert/encoder/layer_5/attention/output/dense/kernel" + string_val: "bert/encoder/layer_5/attention/output/dense/bias" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel" + string_val: "bert/encoder/layer_5/intermediate/dense/bias" + string_val: "bert/encoder/layer_5/output/dense/kernel" + string_val: "bert/encoder/layer_5/output/dense/bias" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta" + string_val: "bert/encoder/layer_6/attention/self/query/kernel" + string_val: "bert/encoder/layer_6/attention/self/query/bias" + string_val: "bert/encoder/layer_6/attention/self/key/kernel" + string_val: "bert/encoder/layer_6/attention/self/key/bias" + string_val: "bert/encoder/layer_6/attention/self/value/kernel" + string_val: "bert/encoder/layer_6/attention/self/value/bias" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel" + string_val: "bert/encoder/layer_6/attention/output/dense/bias" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel" + string_val: "bert/encoder/layer_6/intermediate/dense/bias" + string_val: "bert/encoder/layer_6/output/dense/kernel" + string_val: "bert/encoder/layer_6/output/dense/bias" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta" + string_val: "bert/encoder/layer_7/attention/self/query/kernel" + string_val: "bert/encoder/layer_7/attention/self/query/bias" + string_val: "bert/encoder/layer_7/attention/self/key/kernel" + string_val: "bert/encoder/layer_7/attention/self/key/bias" + string_val: "bert/encoder/layer_7/attention/self/value/kernel" + string_val: "bert/encoder/layer_7/attention/self/value/bias" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel" + string_val: "bert/encoder/layer_7/attention/output/dense/bias" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel" + string_val: "bert/encoder/layer_7/intermediate/dense/bias" + string_val: "bert/encoder/layer_7/output/dense/kernel" + string_val: "bert/encoder/layer_7/output/dense/bias" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta" + string_val: "bert/encoder/layer_8/attention/self/query/kernel" + string_val: "bert/encoder/layer_8/attention/self/query/bias" + string_val: "bert/encoder/layer_8/attention/self/key/kernel" + string_val: "bert/encoder/layer_8/attention/self/key/bias" + string_val: "bert/encoder/layer_8/attention/self/value/kernel" + string_val: "bert/encoder/layer_8/attention/self/value/bias" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel" + string_val: "bert/encoder/layer_8/attention/output/dense/bias" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel" + string_val: 
"bert/encoder/layer_8/intermediate/dense/bias" + string_val: "bert/encoder/layer_8/output/dense/kernel" + string_val: "bert/encoder/layer_8/output/dense/bias" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta" + string_val: "bert/encoder/layer_9/attention/self/query/kernel" + string_val: "bert/encoder/layer_9/attention/self/query/bias" + string_val: "bert/encoder/layer_9/attention/self/key/kernel" + string_val: "bert/encoder/layer_9/attention/self/key/bias" + string_val: "bert/encoder/layer_9/attention/self/value/kernel" + string_val: "bert/encoder/layer_9/attention/self/value/bias" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel" + string_val: "bert/encoder/layer_9/attention/output/dense/bias" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel" + string_val: "bert/encoder/layer_9/intermediate/dense/bias" + string_val: "bert/encoder/layer_9/output/dense/kernel" + string_val: "bert/encoder/layer_9/output/dense/bias" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta" + string_val: "bert/encoder/layer_10/attention/self/query/kernel" + string_val: "bert/encoder/layer_10/attention/self/query/bias" + string_val: "bert/encoder/layer_10/attention/self/key/kernel" + string_val: "bert/encoder/layer_10/attention/self/key/bias" + string_val: "bert/encoder/layer_10/attention/self/value/kernel" + string_val: "bert/encoder/layer_10/attention/self/value/bias" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel" + string_val: "bert/encoder/layer_10/attention/output/dense/bias" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel" + string_val: "bert/encoder/layer_10/intermediate/dense/bias" + string_val: "bert/encoder/layer_10/output/dense/kernel" + string_val: "bert/encoder/layer_10/output/dense/bias" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta" + string_val: "bert/encoder/layer_11/attention/self/query/kernel" + string_val: "bert/encoder/layer_11/attention/self/query/bias" + string_val: "bert/encoder/layer_11/attention/self/key/kernel" + string_val: "bert/encoder/layer_11/attention/self/key/bias" + string_val: "bert/encoder/layer_11/attention/self/value/kernel" + string_val: "bert/encoder/layer_11/attention/self/value/bias" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel" + string_val: "bert/encoder/layer_11/attention/output/dense/bias" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel" + string_val: "bert/encoder/layer_11/intermediate/dense/bias" + string_val: "bert/encoder/layer_11/output/dense/kernel" + string_val: "bert/encoder/layer_11/output/dense/bias" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta" + string_val: "bert/pooler/dense/kernel" + 
string_val: "bert/pooler/dense/bias" + string_val: "cls/squad/output_weights" + string_val: "cls/squad/output_bias" + string_val: "bert/embeddings/word_embeddings/adam_m" + string_val: "bert/embeddings/word_embeddings/adam_v" + string_val: "bert/embeddings/token_type_embeddings/adam_m" + string_val: "bert/embeddings/token_type_embeddings/adam_v" + string_val: "bert/embeddings/position_embeddings/adam_m" + string_val: "bert/embeddings/position_embeddings/adam_v" + string_val: "bert/embeddings/layer_normalization/gamma/adam_m" + string_val: "bert/embeddings/layer_normalization/gamma/adam_v" + string_val: "bert/embeddings/layer_normalization/beta/adam_m" + string_val: "bert/embeddings/layer_normalization/beta/adam_v" + string_val: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + string_val: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + 
string_val: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + string_val: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + string_val: 
"bert/encoder/layer_2/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + string_val: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + string_val: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + string_val: 
"bert/encoder/layer_4/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + string_val: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + string_val: 
"bert/encoder/layer_5/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + string_val: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + string_val: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + string_val: 
"bert/encoder/layer_7/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + string_val: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/output/dense/kernel/adam_m" + string_val: 
"bert/encoder/layer_8/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + string_val: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + string_val: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + 
string_val: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + string_val: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/output/dense/kernel/adam_m" + string_val: 
"bert/encoder/layer_11/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + string_val: "cls/squad/output_weights/adam_m" + string_val: "cls/squad/output_weights/adam_v" + string_val: "cls/squad/output_bias/adam_m" + string_val: "cls/squad/output_bias/adam_v" + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Shape" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 600 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice/stack" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice/stack_1" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice/stack_2" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice" + op: "StridedSlice" + input: "report_uninitialized_variables/boolean_mask/Shape" + input: "report_uninitialized_variables/boolean_mask/strided_slice/stack" + input: "report_uninitialized_variables/boolean_mask/strided_slice/stack_1" + input: "report_uninitialized_variables/boolean_mask/strided_slice/stack_2" + device: "/device:CPU:0" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Prod/reduction_indices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { 
+ key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Prod" + op: "Prod" + input: "report_uninitialized_variables/boolean_mask/strided_slice" + input: "report_uninitialized_variables/boolean_mask/Prod/reduction_indices" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Shape_1" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 600 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice_1/stack" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice_1/stack_1" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice_1/stack_2" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice_1" + op: "StridedSlice" + input: "report_uninitialized_variables/boolean_mask/Shape_1" + input: "report_uninitialized_variables/boolean_mask/strided_slice_1/stack" + input: "report_uninitialized_variables/boolean_mask/strided_slice_1/stack_1" + input: "report_uninitialized_variables/boolean_mask/strided_slice_1/stack_2" + device: "/device:CPU:0" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "begin_mask" + value { + i: 1 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Shape_2" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim 
{ + size: 1 + } + } + int_val: 600 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice_2/stack" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice_2/stack_1" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice_2/stack_2" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/strided_slice_2" + op: "StridedSlice" + input: "report_uninitialized_variables/boolean_mask/Shape_2" + input: "report_uninitialized_variables/boolean_mask/strided_slice_2/stack" + input: "report_uninitialized_variables/boolean_mask/strided_slice_2/stack_1" + input: "report_uninitialized_variables/boolean_mask/strided_slice_2/stack_2" + device: "/device:CPU:0" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 1 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/concat/values_1" + op: "Pack" + input: "report_uninitialized_variables/boolean_mask/Prod" + device: "/device:CPU:0" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/concat/axis" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/concat" + op: "ConcatV2" + input: "report_uninitialized_variables/boolean_mask/strided_slice_1" + input: "report_uninitialized_variables/boolean_mask/concat/values_1" + input: "report_uninitialized_variables/boolean_mask/strided_slice_2" + input: "report_uninitialized_variables/boolean_mask/concat/axis" + device: "/device:CPU:0" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + 
} + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Reshape" + op: "Reshape" + input: "report_uninitialized_variables/Const" + input: "report_uninitialized_variables/boolean_mask/concat" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Reshape_1/shape" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: -1 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Reshape_1" + op: "Reshape" + input: "report_uninitialized_variables/LogicalNot" + input: "report_uninitialized_variables/boolean_mask/Reshape_1/shape" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_BOOL + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Where" + op: "Where" + input: "report_uninitialized_variables/boolean_mask/Reshape_1" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_BOOL + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/Squeeze" + op: "Squeeze" + input: "report_uninitialized_variables/boolean_mask/Where" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_INT64 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + } + } + } + attr { + key: "squeeze_dims" + value { + list { + i: 1 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/GatherV2/axis" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables/boolean_mask/GatherV2" + op: "GatherV2" + input: "report_uninitialized_variables/boolean_mask/Reshape" + input: "report_uninitialized_variables/boolean_mask/Squeeze" + input: "report_uninitialized_variables/boolean_mask/GatherV2/axis" + device: "/device:CPU:0" + attr { + key: "Taxis" + value { + type: DT_INT32 + } + } + attr { + key: "Tindices" + value { + type: DT_INT64 + } + } + attr { + key: "Tparams" + value { + type: DT_STRING + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + } + } + } + attr { + key: "batch_dims" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_resources/Const" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + } + } + } + } + } +} +node { + 
name: "concat/axis" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "concat" + op: "ConcatV2" + input: "report_uninitialized_variables/boolean_mask/GatherV2" + input: "report_uninitialized_resources/Const" + input: "concat/axis" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp" + op: "VarIsInitializedOp" + input: "global_step" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_1" + op: "VarIsInitializedOp" + input: "bert/embeddings/word_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_2" + op: "VarIsInitializedOp" + input: "bert/embeddings/token_type_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_3" + op: "VarIsInitializedOp" + input: "bert/embeddings/position_embeddings" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_4" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_5" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_6" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_7" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_8" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_9" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_10" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_11" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables_1/VarIsInitializedOp_12" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_13" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_14" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_15" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_16" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_17" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_18" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_19" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_20" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_21" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_22" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_23" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_24" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_25" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_26" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_27" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_28" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_29" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_30" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_31" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_32" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_33" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_34" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_35" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_36" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_37" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_38" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_39" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_40" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_41" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_42" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_43" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_44" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_45" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_46" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_47" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_48" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_49" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_50" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_51" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_52" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_53" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_54" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_55" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} 
+node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_56" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_57" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_58" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_59" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_60" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_61" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_62" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_63" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_64" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_65" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_66" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_67" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_68" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_69" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_70" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + 
attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_71" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_72" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_73" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_74" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_75" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_76" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_77" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_78" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_79" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_80" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_81" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_82" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_83" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_84" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_85" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_86" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_87" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_88" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_89" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_90" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_91" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_92" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_93" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_94" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_95" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_96" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_97" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_98" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_99" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { 
+ name: "report_uninitialized_variables_1/VarIsInitializedOp_100" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_101" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_102" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_103" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_104" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_105" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_106" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_107" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_108" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_109" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_110" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_111" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_112" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_113" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_114" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_6/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_115" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_116" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_117" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_118" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_119" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_120" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_121" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_122" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_123" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_124" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_125" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_126" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_127" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_128" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + 
name: "report_uninitialized_variables_1/VarIsInitializedOp_129" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_130" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_131" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_132" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_133" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_134" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_135" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_136" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_137" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_138" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_139" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_140" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_141" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_142" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_143" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_144" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_145" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_146" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_147" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_148" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_149" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_150" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_151" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_152" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_153" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_154" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_155" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_156" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_157" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables_1/VarIsInitializedOp_158" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_159" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_160" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_161" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_162" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_163" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_164" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_165" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_166" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_167" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_168" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_169" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_170" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_171" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_172" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_10/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_173" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_174" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_175" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_176" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_177" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_178" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_179" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_180" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_181" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_182" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_183" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_184" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_185" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_186" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} 
+node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_187" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_188" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_189" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_190" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_191" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_192" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_193" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_194" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_195" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_196" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_197" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_198" + op: "VarIsInitializedOp" + input: "bert/pooler/dense/kernel" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_199" + op: "VarIsInitializedOp" + input: "bert/pooler/dense/bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_200" + op: "VarIsInitializedOp" + input: "cls/squad/output_weights" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_201" + op: "VarIsInitializedOp" + input: "cls/squad/output_bias" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } 
+ } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_202" + op: "VarIsInitializedOp" + input: "bert/embeddings/word_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_203" + op: "VarIsInitializedOp" + input: "bert/embeddings/word_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_204" + op: "VarIsInitializedOp" + input: "bert/embeddings/token_type_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_205" + op: "VarIsInitializedOp" + input: "bert/embeddings/token_type_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_206" + op: "VarIsInitializedOp" + input: "bert/embeddings/position_embeddings/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_207" + op: "VarIsInitializedOp" + input: "bert/embeddings/position_embeddings/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_208" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_209" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_210" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_211" + op: "VarIsInitializedOp" + input: "bert/embeddings/layer_normalization/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_212" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_213" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_214" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_215" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_216" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + attr 
{ + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_217" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_218" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_219" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_220" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_221" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_222" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_223" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_224" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_225" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_226" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_227" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_228" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_229" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_230" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_231" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_232" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_233" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_234" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_235" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_236" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_237" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_238" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_239" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_240" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_241" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_242" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_243" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_244" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables_1/VarIsInitializedOp_245" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_246" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_247" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_248" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_249" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_250" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_251" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_252" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_253" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_254" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_255" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_256" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_257" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_258" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_259" + op: "VarIsInitializedOp" 
+ input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_260" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_261" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_262" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_263" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_264" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_265" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_266" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_267" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_268" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_269" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_270" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_271" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_272" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_273" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_274" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_275" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_276" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_277" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_278" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_279" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_280" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_281" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_282" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_283" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_284" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_285" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_286" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_287" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + attr { + 
key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_288" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_289" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_290" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_291" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_292" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_293" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_294" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_295" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_296" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_297" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_298" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_299" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_300" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_301" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_302" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_303" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_304" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_305" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_306" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_307" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_308" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_309" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_310" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_311" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_312" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_313" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_314" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_315" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables_1/VarIsInitializedOp_316" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_317" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_318" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_319" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_320" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_321" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_322" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_323" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_324" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_325" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_326" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_327" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_328" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_329" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables_1/VarIsInitializedOp_330" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_331" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_332" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_333" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_334" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_335" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_336" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_337" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_338" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_339" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_340" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_341" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_342" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_343" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_344" + op: "VarIsInitializedOp" + 
input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_345" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_346" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_347" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_348" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_349" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_350" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_351" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_352" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_353" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_354" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_355" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_356" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_357" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_358" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_359" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_360" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_361" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_362" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_363" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_364" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_365" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_366" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_367" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_368" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_369" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_370" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_371" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_372" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_5/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_373" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_374" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_375" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_376" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_377" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_378" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_379" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_380" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_381" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_382" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_383" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_384" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_385" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_386" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + attr { + key: 
"_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_387" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_388" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_389" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_390" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_391" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_392" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_393" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_394" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_395" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_396" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_397" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_398" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_399" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_400" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + 
shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_401" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_402" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_403" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_404" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_405" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_406" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_407" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_408" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_409" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_410" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_411" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_412" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_413" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_414" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables_1/VarIsInitializedOp_415" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_416" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_417" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_418" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_419" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_420" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_421" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_422" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_423" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_424" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_425" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_426" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_427" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_428" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables_1/VarIsInitializedOp_429" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_430" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_431" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_432" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_433" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_434" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_435" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_436" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_437" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_438" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_439" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_440" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_441" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_442" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_443" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_444" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_445" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_446" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_447" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_448" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_449" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_450" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_451" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_452" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_453" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_454" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_455" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_456" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_457" + op: 
"VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_458" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_459" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_460" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_461" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_462" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_463" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_464" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_465" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_466" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_467" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_468" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_469" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_470" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_471" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v" 
+ attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_472" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_473" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_474" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_475" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_476" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_477" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_478" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_479" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_480" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_481" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_482" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_483" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_484" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_485" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + attr { + key: "_output_shapes" + value { + 
list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_486" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_487" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_488" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_489" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_490" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_491" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_492" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_493" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_494" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_495" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_496" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_497" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_498" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_499" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: 
"report_uninitialized_variables_1/VarIsInitializedOp_500" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_501" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_502" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_503" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_504" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_505" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_506" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_507" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_508" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_509" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_510" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_511" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_512" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_513" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_514" + op: "VarIsInitializedOp" 
+ input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_515" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_516" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_517" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_518" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_519" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_520" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_521" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_522" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_523" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_524" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_525" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_526" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_527" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_528" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_529" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_530" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_531" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_532" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_533" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_534" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_535" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_536" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_537" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_538" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_539" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_540" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_541" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_542" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_10/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_543" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_544" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_545" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_546" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_547" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_548" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_549" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_550" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_551" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_552" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_553" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_554" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_555" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_556" + op: "VarIsInitializedOp" + 
input: "bert/encoder/layer_10/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_557" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_558" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_559" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_560" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_561" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_562" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_563" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_564" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_565" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_566" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_567" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_568" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_569" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_570" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + attr 
{ + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_571" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_572" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_573" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_574" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_575" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_576" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_577" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_578" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_579" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_580" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_581" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_582" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_583" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_584" + op: "VarIsInitializedOp" + input: 
"bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_585" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_586" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_587" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_588" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_589" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_590" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_591" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_592" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_593" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_594" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_595" + op: "VarIsInitializedOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_596" + op: "VarIsInitializedOp" + input: "cls/squad/output_weights/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_597" + op: "VarIsInitializedOp" + input: "cls/squad/output_weights/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/VarIsInitializedOp_598" + op: "VarIsInitializedOp" + input: "cls/squad/output_bias/adam_m" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + 
name: "report_uninitialized_variables_1/VarIsInitializedOp_599" + op: "VarIsInitializedOp" + input: "cls/squad/output_bias/adam_v" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/stack" + op: "Pack" + input: "report_uninitialized_variables_1/VarIsInitializedOp" + input: "report_uninitialized_variables_1/VarIsInitializedOp_1" + input: "report_uninitialized_variables_1/VarIsInitializedOp_2" + input: "report_uninitialized_variables_1/VarIsInitializedOp_3" + input: "report_uninitialized_variables_1/VarIsInitializedOp_4" + input: "report_uninitialized_variables_1/VarIsInitializedOp_5" + input: "report_uninitialized_variables_1/VarIsInitializedOp_6" + input: "report_uninitialized_variables_1/VarIsInitializedOp_7" + input: "report_uninitialized_variables_1/VarIsInitializedOp_8" + input: "report_uninitialized_variables_1/VarIsInitializedOp_9" + input: "report_uninitialized_variables_1/VarIsInitializedOp_10" + input: "report_uninitialized_variables_1/VarIsInitializedOp_11" + input: "report_uninitialized_variables_1/VarIsInitializedOp_12" + input: "report_uninitialized_variables_1/VarIsInitializedOp_13" + input: "report_uninitialized_variables_1/VarIsInitializedOp_14" + input: "report_uninitialized_variables_1/VarIsInitializedOp_15" + input: "report_uninitialized_variables_1/VarIsInitializedOp_16" + input: "report_uninitialized_variables_1/VarIsInitializedOp_17" + input: "report_uninitialized_variables_1/VarIsInitializedOp_18" + input: "report_uninitialized_variables_1/VarIsInitializedOp_19" + input: "report_uninitialized_variables_1/VarIsInitializedOp_20" + input: "report_uninitialized_variables_1/VarIsInitializedOp_21" + input: "report_uninitialized_variables_1/VarIsInitializedOp_22" + input: "report_uninitialized_variables_1/VarIsInitializedOp_23" + input: "report_uninitialized_variables_1/VarIsInitializedOp_24" + input: "report_uninitialized_variables_1/VarIsInitializedOp_25" + input: "report_uninitialized_variables_1/VarIsInitializedOp_26" + input: "report_uninitialized_variables_1/VarIsInitializedOp_27" + input: "report_uninitialized_variables_1/VarIsInitializedOp_28" + input: "report_uninitialized_variables_1/VarIsInitializedOp_29" + input: "report_uninitialized_variables_1/VarIsInitializedOp_30" + input: "report_uninitialized_variables_1/VarIsInitializedOp_31" + input: "report_uninitialized_variables_1/VarIsInitializedOp_32" + input: "report_uninitialized_variables_1/VarIsInitializedOp_33" + input: "report_uninitialized_variables_1/VarIsInitializedOp_34" + input: "report_uninitialized_variables_1/VarIsInitializedOp_35" + input: "report_uninitialized_variables_1/VarIsInitializedOp_36" + input: "report_uninitialized_variables_1/VarIsInitializedOp_37" + input: "report_uninitialized_variables_1/VarIsInitializedOp_38" + input: "report_uninitialized_variables_1/VarIsInitializedOp_39" + input: "report_uninitialized_variables_1/VarIsInitializedOp_40" + input: "report_uninitialized_variables_1/VarIsInitializedOp_41" + input: "report_uninitialized_variables_1/VarIsInitializedOp_42" + input: "report_uninitialized_variables_1/VarIsInitializedOp_43" + input: "report_uninitialized_variables_1/VarIsInitializedOp_44" + input: "report_uninitialized_variables_1/VarIsInitializedOp_45" + input: "report_uninitialized_variables_1/VarIsInitializedOp_46" + input: "report_uninitialized_variables_1/VarIsInitializedOp_47" + input: "report_uninitialized_variables_1/VarIsInitializedOp_48" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_49" + input: "report_uninitialized_variables_1/VarIsInitializedOp_50" + input: "report_uninitialized_variables_1/VarIsInitializedOp_51" + input: "report_uninitialized_variables_1/VarIsInitializedOp_52" + input: "report_uninitialized_variables_1/VarIsInitializedOp_53" + input: "report_uninitialized_variables_1/VarIsInitializedOp_54" + input: "report_uninitialized_variables_1/VarIsInitializedOp_55" + input: "report_uninitialized_variables_1/VarIsInitializedOp_56" + input: "report_uninitialized_variables_1/VarIsInitializedOp_57" + input: "report_uninitialized_variables_1/VarIsInitializedOp_58" + input: "report_uninitialized_variables_1/VarIsInitializedOp_59" + input: "report_uninitialized_variables_1/VarIsInitializedOp_60" + input: "report_uninitialized_variables_1/VarIsInitializedOp_61" + input: "report_uninitialized_variables_1/VarIsInitializedOp_62" + input: "report_uninitialized_variables_1/VarIsInitializedOp_63" + input: "report_uninitialized_variables_1/VarIsInitializedOp_64" + input: "report_uninitialized_variables_1/VarIsInitializedOp_65" + input: "report_uninitialized_variables_1/VarIsInitializedOp_66" + input: "report_uninitialized_variables_1/VarIsInitializedOp_67" + input: "report_uninitialized_variables_1/VarIsInitializedOp_68" + input: "report_uninitialized_variables_1/VarIsInitializedOp_69" + input: "report_uninitialized_variables_1/VarIsInitializedOp_70" + input: "report_uninitialized_variables_1/VarIsInitializedOp_71" + input: "report_uninitialized_variables_1/VarIsInitializedOp_72" + input: "report_uninitialized_variables_1/VarIsInitializedOp_73" + input: "report_uninitialized_variables_1/VarIsInitializedOp_74" + input: "report_uninitialized_variables_1/VarIsInitializedOp_75" + input: "report_uninitialized_variables_1/VarIsInitializedOp_76" + input: "report_uninitialized_variables_1/VarIsInitializedOp_77" + input: "report_uninitialized_variables_1/VarIsInitializedOp_78" + input: "report_uninitialized_variables_1/VarIsInitializedOp_79" + input: "report_uninitialized_variables_1/VarIsInitializedOp_80" + input: "report_uninitialized_variables_1/VarIsInitializedOp_81" + input: "report_uninitialized_variables_1/VarIsInitializedOp_82" + input: "report_uninitialized_variables_1/VarIsInitializedOp_83" + input: "report_uninitialized_variables_1/VarIsInitializedOp_84" + input: "report_uninitialized_variables_1/VarIsInitializedOp_85" + input: "report_uninitialized_variables_1/VarIsInitializedOp_86" + input: "report_uninitialized_variables_1/VarIsInitializedOp_87" + input: "report_uninitialized_variables_1/VarIsInitializedOp_88" + input: "report_uninitialized_variables_1/VarIsInitializedOp_89" + input: "report_uninitialized_variables_1/VarIsInitializedOp_90" + input: "report_uninitialized_variables_1/VarIsInitializedOp_91" + input: "report_uninitialized_variables_1/VarIsInitializedOp_92" + input: "report_uninitialized_variables_1/VarIsInitializedOp_93" + input: "report_uninitialized_variables_1/VarIsInitializedOp_94" + input: "report_uninitialized_variables_1/VarIsInitializedOp_95" + input: "report_uninitialized_variables_1/VarIsInitializedOp_96" + input: "report_uninitialized_variables_1/VarIsInitializedOp_97" + input: "report_uninitialized_variables_1/VarIsInitializedOp_98" + input: "report_uninitialized_variables_1/VarIsInitializedOp_99" + input: "report_uninitialized_variables_1/VarIsInitializedOp_100" + input: "report_uninitialized_variables_1/VarIsInitializedOp_101" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_102" + input: "report_uninitialized_variables_1/VarIsInitializedOp_103" + input: "report_uninitialized_variables_1/VarIsInitializedOp_104" + input: "report_uninitialized_variables_1/VarIsInitializedOp_105" + input: "report_uninitialized_variables_1/VarIsInitializedOp_106" + input: "report_uninitialized_variables_1/VarIsInitializedOp_107" + input: "report_uninitialized_variables_1/VarIsInitializedOp_108" + input: "report_uninitialized_variables_1/VarIsInitializedOp_109" + input: "report_uninitialized_variables_1/VarIsInitializedOp_110" + input: "report_uninitialized_variables_1/VarIsInitializedOp_111" + input: "report_uninitialized_variables_1/VarIsInitializedOp_112" + input: "report_uninitialized_variables_1/VarIsInitializedOp_113" + input: "report_uninitialized_variables_1/VarIsInitializedOp_114" + input: "report_uninitialized_variables_1/VarIsInitializedOp_115" + input: "report_uninitialized_variables_1/VarIsInitializedOp_116" + input: "report_uninitialized_variables_1/VarIsInitializedOp_117" + input: "report_uninitialized_variables_1/VarIsInitializedOp_118" + input: "report_uninitialized_variables_1/VarIsInitializedOp_119" + input: "report_uninitialized_variables_1/VarIsInitializedOp_120" + input: "report_uninitialized_variables_1/VarIsInitializedOp_121" + input: "report_uninitialized_variables_1/VarIsInitializedOp_122" + input: "report_uninitialized_variables_1/VarIsInitializedOp_123" + input: "report_uninitialized_variables_1/VarIsInitializedOp_124" + input: "report_uninitialized_variables_1/VarIsInitializedOp_125" + input: "report_uninitialized_variables_1/VarIsInitializedOp_126" + input: "report_uninitialized_variables_1/VarIsInitializedOp_127" + input: "report_uninitialized_variables_1/VarIsInitializedOp_128" + input: "report_uninitialized_variables_1/VarIsInitializedOp_129" + input: "report_uninitialized_variables_1/VarIsInitializedOp_130" + input: "report_uninitialized_variables_1/VarIsInitializedOp_131" + input: "report_uninitialized_variables_1/VarIsInitializedOp_132" + input: "report_uninitialized_variables_1/VarIsInitializedOp_133" + input: "report_uninitialized_variables_1/VarIsInitializedOp_134" + input: "report_uninitialized_variables_1/VarIsInitializedOp_135" + input: "report_uninitialized_variables_1/VarIsInitializedOp_136" + input: "report_uninitialized_variables_1/VarIsInitializedOp_137" + input: "report_uninitialized_variables_1/VarIsInitializedOp_138" + input: "report_uninitialized_variables_1/VarIsInitializedOp_139" + input: "report_uninitialized_variables_1/VarIsInitializedOp_140" + input: "report_uninitialized_variables_1/VarIsInitializedOp_141" + input: "report_uninitialized_variables_1/VarIsInitializedOp_142" + input: "report_uninitialized_variables_1/VarIsInitializedOp_143" + input: "report_uninitialized_variables_1/VarIsInitializedOp_144" + input: "report_uninitialized_variables_1/VarIsInitializedOp_145" + input: "report_uninitialized_variables_1/VarIsInitializedOp_146" + input: "report_uninitialized_variables_1/VarIsInitializedOp_147" + input: "report_uninitialized_variables_1/VarIsInitializedOp_148" + input: "report_uninitialized_variables_1/VarIsInitializedOp_149" + input: "report_uninitialized_variables_1/VarIsInitializedOp_150" + input: "report_uninitialized_variables_1/VarIsInitializedOp_151" + input: "report_uninitialized_variables_1/VarIsInitializedOp_152" + input: "report_uninitialized_variables_1/VarIsInitializedOp_153" + input: "report_uninitialized_variables_1/VarIsInitializedOp_154" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_155" + input: "report_uninitialized_variables_1/VarIsInitializedOp_156" + input: "report_uninitialized_variables_1/VarIsInitializedOp_157" + input: "report_uninitialized_variables_1/VarIsInitializedOp_158" + input: "report_uninitialized_variables_1/VarIsInitializedOp_159" + input: "report_uninitialized_variables_1/VarIsInitializedOp_160" + input: "report_uninitialized_variables_1/VarIsInitializedOp_161" + input: "report_uninitialized_variables_1/VarIsInitializedOp_162" + input: "report_uninitialized_variables_1/VarIsInitializedOp_163" + input: "report_uninitialized_variables_1/VarIsInitializedOp_164" + input: "report_uninitialized_variables_1/VarIsInitializedOp_165" + input: "report_uninitialized_variables_1/VarIsInitializedOp_166" + input: "report_uninitialized_variables_1/VarIsInitializedOp_167" + input: "report_uninitialized_variables_1/VarIsInitializedOp_168" + input: "report_uninitialized_variables_1/VarIsInitializedOp_169" + input: "report_uninitialized_variables_1/VarIsInitializedOp_170" + input: "report_uninitialized_variables_1/VarIsInitializedOp_171" + input: "report_uninitialized_variables_1/VarIsInitializedOp_172" + input: "report_uninitialized_variables_1/VarIsInitializedOp_173" + input: "report_uninitialized_variables_1/VarIsInitializedOp_174" + input: "report_uninitialized_variables_1/VarIsInitializedOp_175" + input: "report_uninitialized_variables_1/VarIsInitializedOp_176" + input: "report_uninitialized_variables_1/VarIsInitializedOp_177" + input: "report_uninitialized_variables_1/VarIsInitializedOp_178" + input: "report_uninitialized_variables_1/VarIsInitializedOp_179" + input: "report_uninitialized_variables_1/VarIsInitializedOp_180" + input: "report_uninitialized_variables_1/VarIsInitializedOp_181" + input: "report_uninitialized_variables_1/VarIsInitializedOp_182" + input: "report_uninitialized_variables_1/VarIsInitializedOp_183" + input: "report_uninitialized_variables_1/VarIsInitializedOp_184" + input: "report_uninitialized_variables_1/VarIsInitializedOp_185" + input: "report_uninitialized_variables_1/VarIsInitializedOp_186" + input: "report_uninitialized_variables_1/VarIsInitializedOp_187" + input: "report_uninitialized_variables_1/VarIsInitializedOp_188" + input: "report_uninitialized_variables_1/VarIsInitializedOp_189" + input: "report_uninitialized_variables_1/VarIsInitializedOp_190" + input: "report_uninitialized_variables_1/VarIsInitializedOp_191" + input: "report_uninitialized_variables_1/VarIsInitializedOp_192" + input: "report_uninitialized_variables_1/VarIsInitializedOp_193" + input: "report_uninitialized_variables_1/VarIsInitializedOp_194" + input: "report_uninitialized_variables_1/VarIsInitializedOp_195" + input: "report_uninitialized_variables_1/VarIsInitializedOp_196" + input: "report_uninitialized_variables_1/VarIsInitializedOp_197" + input: "report_uninitialized_variables_1/VarIsInitializedOp_198" + input: "report_uninitialized_variables_1/VarIsInitializedOp_199" + input: "report_uninitialized_variables_1/VarIsInitializedOp_200" + input: "report_uninitialized_variables_1/VarIsInitializedOp_201" + input: "report_uninitialized_variables_1/VarIsInitializedOp_202" + input: "report_uninitialized_variables_1/VarIsInitializedOp_203" + input: "report_uninitialized_variables_1/VarIsInitializedOp_204" + input: "report_uninitialized_variables_1/VarIsInitializedOp_205" + input: "report_uninitialized_variables_1/VarIsInitializedOp_206" + input: "report_uninitialized_variables_1/VarIsInitializedOp_207" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_208" + input: "report_uninitialized_variables_1/VarIsInitializedOp_209" + input: "report_uninitialized_variables_1/VarIsInitializedOp_210" + input: "report_uninitialized_variables_1/VarIsInitializedOp_211" + input: "report_uninitialized_variables_1/VarIsInitializedOp_212" + input: "report_uninitialized_variables_1/VarIsInitializedOp_213" + input: "report_uninitialized_variables_1/VarIsInitializedOp_214" + input: "report_uninitialized_variables_1/VarIsInitializedOp_215" + input: "report_uninitialized_variables_1/VarIsInitializedOp_216" + input: "report_uninitialized_variables_1/VarIsInitializedOp_217" + input: "report_uninitialized_variables_1/VarIsInitializedOp_218" + input: "report_uninitialized_variables_1/VarIsInitializedOp_219" + input: "report_uninitialized_variables_1/VarIsInitializedOp_220" + input: "report_uninitialized_variables_1/VarIsInitializedOp_221" + input: "report_uninitialized_variables_1/VarIsInitializedOp_222" + input: "report_uninitialized_variables_1/VarIsInitializedOp_223" + input: "report_uninitialized_variables_1/VarIsInitializedOp_224" + input: "report_uninitialized_variables_1/VarIsInitializedOp_225" + input: "report_uninitialized_variables_1/VarIsInitializedOp_226" + input: "report_uninitialized_variables_1/VarIsInitializedOp_227" + input: "report_uninitialized_variables_1/VarIsInitializedOp_228" + input: "report_uninitialized_variables_1/VarIsInitializedOp_229" + input: "report_uninitialized_variables_1/VarIsInitializedOp_230" + input: "report_uninitialized_variables_1/VarIsInitializedOp_231" + input: "report_uninitialized_variables_1/VarIsInitializedOp_232" + input: "report_uninitialized_variables_1/VarIsInitializedOp_233" + input: "report_uninitialized_variables_1/VarIsInitializedOp_234" + input: "report_uninitialized_variables_1/VarIsInitializedOp_235" + input: "report_uninitialized_variables_1/VarIsInitializedOp_236" + input: "report_uninitialized_variables_1/VarIsInitializedOp_237" + input: "report_uninitialized_variables_1/VarIsInitializedOp_238" + input: "report_uninitialized_variables_1/VarIsInitializedOp_239" + input: "report_uninitialized_variables_1/VarIsInitializedOp_240" + input: "report_uninitialized_variables_1/VarIsInitializedOp_241" + input: "report_uninitialized_variables_1/VarIsInitializedOp_242" + input: "report_uninitialized_variables_1/VarIsInitializedOp_243" + input: "report_uninitialized_variables_1/VarIsInitializedOp_244" + input: "report_uninitialized_variables_1/VarIsInitializedOp_245" + input: "report_uninitialized_variables_1/VarIsInitializedOp_246" + input: "report_uninitialized_variables_1/VarIsInitializedOp_247" + input: "report_uninitialized_variables_1/VarIsInitializedOp_248" + input: "report_uninitialized_variables_1/VarIsInitializedOp_249" + input: "report_uninitialized_variables_1/VarIsInitializedOp_250" + input: "report_uninitialized_variables_1/VarIsInitializedOp_251" + input: "report_uninitialized_variables_1/VarIsInitializedOp_252" + input: "report_uninitialized_variables_1/VarIsInitializedOp_253" + input: "report_uninitialized_variables_1/VarIsInitializedOp_254" + input: "report_uninitialized_variables_1/VarIsInitializedOp_255" + input: "report_uninitialized_variables_1/VarIsInitializedOp_256" + input: "report_uninitialized_variables_1/VarIsInitializedOp_257" + input: "report_uninitialized_variables_1/VarIsInitializedOp_258" + input: "report_uninitialized_variables_1/VarIsInitializedOp_259" + input: "report_uninitialized_variables_1/VarIsInitializedOp_260" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_261" + input: "report_uninitialized_variables_1/VarIsInitializedOp_262" + input: "report_uninitialized_variables_1/VarIsInitializedOp_263" + input: "report_uninitialized_variables_1/VarIsInitializedOp_264" + input: "report_uninitialized_variables_1/VarIsInitializedOp_265" + input: "report_uninitialized_variables_1/VarIsInitializedOp_266" + input: "report_uninitialized_variables_1/VarIsInitializedOp_267" + input: "report_uninitialized_variables_1/VarIsInitializedOp_268" + input: "report_uninitialized_variables_1/VarIsInitializedOp_269" + input: "report_uninitialized_variables_1/VarIsInitializedOp_270" + input: "report_uninitialized_variables_1/VarIsInitializedOp_271" + input: "report_uninitialized_variables_1/VarIsInitializedOp_272" + input: "report_uninitialized_variables_1/VarIsInitializedOp_273" + input: "report_uninitialized_variables_1/VarIsInitializedOp_274" + input: "report_uninitialized_variables_1/VarIsInitializedOp_275" + input: "report_uninitialized_variables_1/VarIsInitializedOp_276" + input: "report_uninitialized_variables_1/VarIsInitializedOp_277" + input: "report_uninitialized_variables_1/VarIsInitializedOp_278" + input: "report_uninitialized_variables_1/VarIsInitializedOp_279" + input: "report_uninitialized_variables_1/VarIsInitializedOp_280" + input: "report_uninitialized_variables_1/VarIsInitializedOp_281" + input: "report_uninitialized_variables_1/VarIsInitializedOp_282" + input: "report_uninitialized_variables_1/VarIsInitializedOp_283" + input: "report_uninitialized_variables_1/VarIsInitializedOp_284" + input: "report_uninitialized_variables_1/VarIsInitializedOp_285" + input: "report_uninitialized_variables_1/VarIsInitializedOp_286" + input: "report_uninitialized_variables_1/VarIsInitializedOp_287" + input: "report_uninitialized_variables_1/VarIsInitializedOp_288" + input: "report_uninitialized_variables_1/VarIsInitializedOp_289" + input: "report_uninitialized_variables_1/VarIsInitializedOp_290" + input: "report_uninitialized_variables_1/VarIsInitializedOp_291" + input: "report_uninitialized_variables_1/VarIsInitializedOp_292" + input: "report_uninitialized_variables_1/VarIsInitializedOp_293" + input: "report_uninitialized_variables_1/VarIsInitializedOp_294" + input: "report_uninitialized_variables_1/VarIsInitializedOp_295" + input: "report_uninitialized_variables_1/VarIsInitializedOp_296" + input: "report_uninitialized_variables_1/VarIsInitializedOp_297" + input: "report_uninitialized_variables_1/VarIsInitializedOp_298" + input: "report_uninitialized_variables_1/VarIsInitializedOp_299" + input: "report_uninitialized_variables_1/VarIsInitializedOp_300" + input: "report_uninitialized_variables_1/VarIsInitializedOp_301" + input: "report_uninitialized_variables_1/VarIsInitializedOp_302" + input: "report_uninitialized_variables_1/VarIsInitializedOp_303" + input: "report_uninitialized_variables_1/VarIsInitializedOp_304" + input: "report_uninitialized_variables_1/VarIsInitializedOp_305" + input: "report_uninitialized_variables_1/VarIsInitializedOp_306" + input: "report_uninitialized_variables_1/VarIsInitializedOp_307" + input: "report_uninitialized_variables_1/VarIsInitializedOp_308" + input: "report_uninitialized_variables_1/VarIsInitializedOp_309" + input: "report_uninitialized_variables_1/VarIsInitializedOp_310" + input: "report_uninitialized_variables_1/VarIsInitializedOp_311" + input: "report_uninitialized_variables_1/VarIsInitializedOp_312" + input: "report_uninitialized_variables_1/VarIsInitializedOp_313" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_314" + input: "report_uninitialized_variables_1/VarIsInitializedOp_315" + input: "report_uninitialized_variables_1/VarIsInitializedOp_316" + input: "report_uninitialized_variables_1/VarIsInitializedOp_317" + input: "report_uninitialized_variables_1/VarIsInitializedOp_318" + input: "report_uninitialized_variables_1/VarIsInitializedOp_319" + input: "report_uninitialized_variables_1/VarIsInitializedOp_320" + input: "report_uninitialized_variables_1/VarIsInitializedOp_321" + input: "report_uninitialized_variables_1/VarIsInitializedOp_322" + input: "report_uninitialized_variables_1/VarIsInitializedOp_323" + input: "report_uninitialized_variables_1/VarIsInitializedOp_324" + input: "report_uninitialized_variables_1/VarIsInitializedOp_325" + input: "report_uninitialized_variables_1/VarIsInitializedOp_326" + input: "report_uninitialized_variables_1/VarIsInitializedOp_327" + input: "report_uninitialized_variables_1/VarIsInitializedOp_328" + input: "report_uninitialized_variables_1/VarIsInitializedOp_329" + input: "report_uninitialized_variables_1/VarIsInitializedOp_330" + input: "report_uninitialized_variables_1/VarIsInitializedOp_331" + input: "report_uninitialized_variables_1/VarIsInitializedOp_332" + input: "report_uninitialized_variables_1/VarIsInitializedOp_333" + input: "report_uninitialized_variables_1/VarIsInitializedOp_334" + input: "report_uninitialized_variables_1/VarIsInitializedOp_335" + input: "report_uninitialized_variables_1/VarIsInitializedOp_336" + input: "report_uninitialized_variables_1/VarIsInitializedOp_337" + input: "report_uninitialized_variables_1/VarIsInitializedOp_338" + input: "report_uninitialized_variables_1/VarIsInitializedOp_339" + input: "report_uninitialized_variables_1/VarIsInitializedOp_340" + input: "report_uninitialized_variables_1/VarIsInitializedOp_341" + input: "report_uninitialized_variables_1/VarIsInitializedOp_342" + input: "report_uninitialized_variables_1/VarIsInitializedOp_343" + input: "report_uninitialized_variables_1/VarIsInitializedOp_344" + input: "report_uninitialized_variables_1/VarIsInitializedOp_345" + input: "report_uninitialized_variables_1/VarIsInitializedOp_346" + input: "report_uninitialized_variables_1/VarIsInitializedOp_347" + input: "report_uninitialized_variables_1/VarIsInitializedOp_348" + input: "report_uninitialized_variables_1/VarIsInitializedOp_349" + input: "report_uninitialized_variables_1/VarIsInitializedOp_350" + input: "report_uninitialized_variables_1/VarIsInitializedOp_351" + input: "report_uninitialized_variables_1/VarIsInitializedOp_352" + input: "report_uninitialized_variables_1/VarIsInitializedOp_353" + input: "report_uninitialized_variables_1/VarIsInitializedOp_354" + input: "report_uninitialized_variables_1/VarIsInitializedOp_355" + input: "report_uninitialized_variables_1/VarIsInitializedOp_356" + input: "report_uninitialized_variables_1/VarIsInitializedOp_357" + input: "report_uninitialized_variables_1/VarIsInitializedOp_358" + input: "report_uninitialized_variables_1/VarIsInitializedOp_359" + input: "report_uninitialized_variables_1/VarIsInitializedOp_360" + input: "report_uninitialized_variables_1/VarIsInitializedOp_361" + input: "report_uninitialized_variables_1/VarIsInitializedOp_362" + input: "report_uninitialized_variables_1/VarIsInitializedOp_363" + input: "report_uninitialized_variables_1/VarIsInitializedOp_364" + input: "report_uninitialized_variables_1/VarIsInitializedOp_365" + input: "report_uninitialized_variables_1/VarIsInitializedOp_366" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_367" + input: "report_uninitialized_variables_1/VarIsInitializedOp_368" + input: "report_uninitialized_variables_1/VarIsInitializedOp_369" + input: "report_uninitialized_variables_1/VarIsInitializedOp_370" + input: "report_uninitialized_variables_1/VarIsInitializedOp_371" + input: "report_uninitialized_variables_1/VarIsInitializedOp_372" + input: "report_uninitialized_variables_1/VarIsInitializedOp_373" + input: "report_uninitialized_variables_1/VarIsInitializedOp_374" + input: "report_uninitialized_variables_1/VarIsInitializedOp_375" + input: "report_uninitialized_variables_1/VarIsInitializedOp_376" + input: "report_uninitialized_variables_1/VarIsInitializedOp_377" + input: "report_uninitialized_variables_1/VarIsInitializedOp_378" + input: "report_uninitialized_variables_1/VarIsInitializedOp_379" + input: "report_uninitialized_variables_1/VarIsInitializedOp_380" + input: "report_uninitialized_variables_1/VarIsInitializedOp_381" + input: "report_uninitialized_variables_1/VarIsInitializedOp_382" + input: "report_uninitialized_variables_1/VarIsInitializedOp_383" + input: "report_uninitialized_variables_1/VarIsInitializedOp_384" + input: "report_uninitialized_variables_1/VarIsInitializedOp_385" + input: "report_uninitialized_variables_1/VarIsInitializedOp_386" + input: "report_uninitialized_variables_1/VarIsInitializedOp_387" + input: "report_uninitialized_variables_1/VarIsInitializedOp_388" + input: "report_uninitialized_variables_1/VarIsInitializedOp_389" + input: "report_uninitialized_variables_1/VarIsInitializedOp_390" + input: "report_uninitialized_variables_1/VarIsInitializedOp_391" + input: "report_uninitialized_variables_1/VarIsInitializedOp_392" + input: "report_uninitialized_variables_1/VarIsInitializedOp_393" + input: "report_uninitialized_variables_1/VarIsInitializedOp_394" + input: "report_uninitialized_variables_1/VarIsInitializedOp_395" + input: "report_uninitialized_variables_1/VarIsInitializedOp_396" + input: "report_uninitialized_variables_1/VarIsInitializedOp_397" + input: "report_uninitialized_variables_1/VarIsInitializedOp_398" + input: "report_uninitialized_variables_1/VarIsInitializedOp_399" + input: "report_uninitialized_variables_1/VarIsInitializedOp_400" + input: "report_uninitialized_variables_1/VarIsInitializedOp_401" + input: "report_uninitialized_variables_1/VarIsInitializedOp_402" + input: "report_uninitialized_variables_1/VarIsInitializedOp_403" + input: "report_uninitialized_variables_1/VarIsInitializedOp_404" + input: "report_uninitialized_variables_1/VarIsInitializedOp_405" + input: "report_uninitialized_variables_1/VarIsInitializedOp_406" + input: "report_uninitialized_variables_1/VarIsInitializedOp_407" + input: "report_uninitialized_variables_1/VarIsInitializedOp_408" + input: "report_uninitialized_variables_1/VarIsInitializedOp_409" + input: "report_uninitialized_variables_1/VarIsInitializedOp_410" + input: "report_uninitialized_variables_1/VarIsInitializedOp_411" + input: "report_uninitialized_variables_1/VarIsInitializedOp_412" + input: "report_uninitialized_variables_1/VarIsInitializedOp_413" + input: "report_uninitialized_variables_1/VarIsInitializedOp_414" + input: "report_uninitialized_variables_1/VarIsInitializedOp_415" + input: "report_uninitialized_variables_1/VarIsInitializedOp_416" + input: "report_uninitialized_variables_1/VarIsInitializedOp_417" + input: "report_uninitialized_variables_1/VarIsInitializedOp_418" + input: "report_uninitialized_variables_1/VarIsInitializedOp_419" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_420" + input: "report_uninitialized_variables_1/VarIsInitializedOp_421" + input: "report_uninitialized_variables_1/VarIsInitializedOp_422" + input: "report_uninitialized_variables_1/VarIsInitializedOp_423" + input: "report_uninitialized_variables_1/VarIsInitializedOp_424" + input: "report_uninitialized_variables_1/VarIsInitializedOp_425" + input: "report_uninitialized_variables_1/VarIsInitializedOp_426" + input: "report_uninitialized_variables_1/VarIsInitializedOp_427" + input: "report_uninitialized_variables_1/VarIsInitializedOp_428" + input: "report_uninitialized_variables_1/VarIsInitializedOp_429" + input: "report_uninitialized_variables_1/VarIsInitializedOp_430" + input: "report_uninitialized_variables_1/VarIsInitializedOp_431" + input: "report_uninitialized_variables_1/VarIsInitializedOp_432" + input: "report_uninitialized_variables_1/VarIsInitializedOp_433" + input: "report_uninitialized_variables_1/VarIsInitializedOp_434" + input: "report_uninitialized_variables_1/VarIsInitializedOp_435" + input: "report_uninitialized_variables_1/VarIsInitializedOp_436" + input: "report_uninitialized_variables_1/VarIsInitializedOp_437" + input: "report_uninitialized_variables_1/VarIsInitializedOp_438" + input: "report_uninitialized_variables_1/VarIsInitializedOp_439" + input: "report_uninitialized_variables_1/VarIsInitializedOp_440" + input: "report_uninitialized_variables_1/VarIsInitializedOp_441" + input: "report_uninitialized_variables_1/VarIsInitializedOp_442" + input: "report_uninitialized_variables_1/VarIsInitializedOp_443" + input: "report_uninitialized_variables_1/VarIsInitializedOp_444" + input: "report_uninitialized_variables_1/VarIsInitializedOp_445" + input: "report_uninitialized_variables_1/VarIsInitializedOp_446" + input: "report_uninitialized_variables_1/VarIsInitializedOp_447" + input: "report_uninitialized_variables_1/VarIsInitializedOp_448" + input: "report_uninitialized_variables_1/VarIsInitializedOp_449" + input: "report_uninitialized_variables_1/VarIsInitializedOp_450" + input: "report_uninitialized_variables_1/VarIsInitializedOp_451" + input: "report_uninitialized_variables_1/VarIsInitializedOp_452" + input: "report_uninitialized_variables_1/VarIsInitializedOp_453" + input: "report_uninitialized_variables_1/VarIsInitializedOp_454" + input: "report_uninitialized_variables_1/VarIsInitializedOp_455" + input: "report_uninitialized_variables_1/VarIsInitializedOp_456" + input: "report_uninitialized_variables_1/VarIsInitializedOp_457" + input: "report_uninitialized_variables_1/VarIsInitializedOp_458" + input: "report_uninitialized_variables_1/VarIsInitializedOp_459" + input: "report_uninitialized_variables_1/VarIsInitializedOp_460" + input: "report_uninitialized_variables_1/VarIsInitializedOp_461" + input: "report_uninitialized_variables_1/VarIsInitializedOp_462" + input: "report_uninitialized_variables_1/VarIsInitializedOp_463" + input: "report_uninitialized_variables_1/VarIsInitializedOp_464" + input: "report_uninitialized_variables_1/VarIsInitializedOp_465" + input: "report_uninitialized_variables_1/VarIsInitializedOp_466" + input: "report_uninitialized_variables_1/VarIsInitializedOp_467" + input: "report_uninitialized_variables_1/VarIsInitializedOp_468" + input: "report_uninitialized_variables_1/VarIsInitializedOp_469" + input: "report_uninitialized_variables_1/VarIsInitializedOp_470" + input: "report_uninitialized_variables_1/VarIsInitializedOp_471" + input: "report_uninitialized_variables_1/VarIsInitializedOp_472" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_473" + input: "report_uninitialized_variables_1/VarIsInitializedOp_474" + input: "report_uninitialized_variables_1/VarIsInitializedOp_475" + input: "report_uninitialized_variables_1/VarIsInitializedOp_476" + input: "report_uninitialized_variables_1/VarIsInitializedOp_477" + input: "report_uninitialized_variables_1/VarIsInitializedOp_478" + input: "report_uninitialized_variables_1/VarIsInitializedOp_479" + input: "report_uninitialized_variables_1/VarIsInitializedOp_480" + input: "report_uninitialized_variables_1/VarIsInitializedOp_481" + input: "report_uninitialized_variables_1/VarIsInitializedOp_482" + input: "report_uninitialized_variables_1/VarIsInitializedOp_483" + input: "report_uninitialized_variables_1/VarIsInitializedOp_484" + input: "report_uninitialized_variables_1/VarIsInitializedOp_485" + input: "report_uninitialized_variables_1/VarIsInitializedOp_486" + input: "report_uninitialized_variables_1/VarIsInitializedOp_487" + input: "report_uninitialized_variables_1/VarIsInitializedOp_488" + input: "report_uninitialized_variables_1/VarIsInitializedOp_489" + input: "report_uninitialized_variables_1/VarIsInitializedOp_490" + input: "report_uninitialized_variables_1/VarIsInitializedOp_491" + input: "report_uninitialized_variables_1/VarIsInitializedOp_492" + input: "report_uninitialized_variables_1/VarIsInitializedOp_493" + input: "report_uninitialized_variables_1/VarIsInitializedOp_494" + input: "report_uninitialized_variables_1/VarIsInitializedOp_495" + input: "report_uninitialized_variables_1/VarIsInitializedOp_496" + input: "report_uninitialized_variables_1/VarIsInitializedOp_497" + input: "report_uninitialized_variables_1/VarIsInitializedOp_498" + input: "report_uninitialized_variables_1/VarIsInitializedOp_499" + input: "report_uninitialized_variables_1/VarIsInitializedOp_500" + input: "report_uninitialized_variables_1/VarIsInitializedOp_501" + input: "report_uninitialized_variables_1/VarIsInitializedOp_502" + input: "report_uninitialized_variables_1/VarIsInitializedOp_503" + input: "report_uninitialized_variables_1/VarIsInitializedOp_504" + input: "report_uninitialized_variables_1/VarIsInitializedOp_505" + input: "report_uninitialized_variables_1/VarIsInitializedOp_506" + input: "report_uninitialized_variables_1/VarIsInitializedOp_507" + input: "report_uninitialized_variables_1/VarIsInitializedOp_508" + input: "report_uninitialized_variables_1/VarIsInitializedOp_509" + input: "report_uninitialized_variables_1/VarIsInitializedOp_510" + input: "report_uninitialized_variables_1/VarIsInitializedOp_511" + input: "report_uninitialized_variables_1/VarIsInitializedOp_512" + input: "report_uninitialized_variables_1/VarIsInitializedOp_513" + input: "report_uninitialized_variables_1/VarIsInitializedOp_514" + input: "report_uninitialized_variables_1/VarIsInitializedOp_515" + input: "report_uninitialized_variables_1/VarIsInitializedOp_516" + input: "report_uninitialized_variables_1/VarIsInitializedOp_517" + input: "report_uninitialized_variables_1/VarIsInitializedOp_518" + input: "report_uninitialized_variables_1/VarIsInitializedOp_519" + input: "report_uninitialized_variables_1/VarIsInitializedOp_520" + input: "report_uninitialized_variables_1/VarIsInitializedOp_521" + input: "report_uninitialized_variables_1/VarIsInitializedOp_522" + input: "report_uninitialized_variables_1/VarIsInitializedOp_523" + input: "report_uninitialized_variables_1/VarIsInitializedOp_524" + input: "report_uninitialized_variables_1/VarIsInitializedOp_525" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_526" + input: "report_uninitialized_variables_1/VarIsInitializedOp_527" + input: "report_uninitialized_variables_1/VarIsInitializedOp_528" + input: "report_uninitialized_variables_1/VarIsInitializedOp_529" + input: "report_uninitialized_variables_1/VarIsInitializedOp_530" + input: "report_uninitialized_variables_1/VarIsInitializedOp_531" + input: "report_uninitialized_variables_1/VarIsInitializedOp_532" + input: "report_uninitialized_variables_1/VarIsInitializedOp_533" + input: "report_uninitialized_variables_1/VarIsInitializedOp_534" + input: "report_uninitialized_variables_1/VarIsInitializedOp_535" + input: "report_uninitialized_variables_1/VarIsInitializedOp_536" + input: "report_uninitialized_variables_1/VarIsInitializedOp_537" + input: "report_uninitialized_variables_1/VarIsInitializedOp_538" + input: "report_uninitialized_variables_1/VarIsInitializedOp_539" + input: "report_uninitialized_variables_1/VarIsInitializedOp_540" + input: "report_uninitialized_variables_1/VarIsInitializedOp_541" + input: "report_uninitialized_variables_1/VarIsInitializedOp_542" + input: "report_uninitialized_variables_1/VarIsInitializedOp_543" + input: "report_uninitialized_variables_1/VarIsInitializedOp_544" + input: "report_uninitialized_variables_1/VarIsInitializedOp_545" + input: "report_uninitialized_variables_1/VarIsInitializedOp_546" + input: "report_uninitialized_variables_1/VarIsInitializedOp_547" + input: "report_uninitialized_variables_1/VarIsInitializedOp_548" + input: "report_uninitialized_variables_1/VarIsInitializedOp_549" + input: "report_uninitialized_variables_1/VarIsInitializedOp_550" + input: "report_uninitialized_variables_1/VarIsInitializedOp_551" + input: "report_uninitialized_variables_1/VarIsInitializedOp_552" + input: "report_uninitialized_variables_1/VarIsInitializedOp_553" + input: "report_uninitialized_variables_1/VarIsInitializedOp_554" + input: "report_uninitialized_variables_1/VarIsInitializedOp_555" + input: "report_uninitialized_variables_1/VarIsInitializedOp_556" + input: "report_uninitialized_variables_1/VarIsInitializedOp_557" + input: "report_uninitialized_variables_1/VarIsInitializedOp_558" + input: "report_uninitialized_variables_1/VarIsInitializedOp_559" + input: "report_uninitialized_variables_1/VarIsInitializedOp_560" + input: "report_uninitialized_variables_1/VarIsInitializedOp_561" + input: "report_uninitialized_variables_1/VarIsInitializedOp_562" + input: "report_uninitialized_variables_1/VarIsInitializedOp_563" + input: "report_uninitialized_variables_1/VarIsInitializedOp_564" + input: "report_uninitialized_variables_1/VarIsInitializedOp_565" + input: "report_uninitialized_variables_1/VarIsInitializedOp_566" + input: "report_uninitialized_variables_1/VarIsInitializedOp_567" + input: "report_uninitialized_variables_1/VarIsInitializedOp_568" + input: "report_uninitialized_variables_1/VarIsInitializedOp_569" + input: "report_uninitialized_variables_1/VarIsInitializedOp_570" + input: "report_uninitialized_variables_1/VarIsInitializedOp_571" + input: "report_uninitialized_variables_1/VarIsInitializedOp_572" + input: "report_uninitialized_variables_1/VarIsInitializedOp_573" + input: "report_uninitialized_variables_1/VarIsInitializedOp_574" + input: "report_uninitialized_variables_1/VarIsInitializedOp_575" + input: "report_uninitialized_variables_1/VarIsInitializedOp_576" + input: "report_uninitialized_variables_1/VarIsInitializedOp_577" + input: "report_uninitialized_variables_1/VarIsInitializedOp_578" + input: 
"report_uninitialized_variables_1/VarIsInitializedOp_579" + input: "report_uninitialized_variables_1/VarIsInitializedOp_580" + input: "report_uninitialized_variables_1/VarIsInitializedOp_581" + input: "report_uninitialized_variables_1/VarIsInitializedOp_582" + input: "report_uninitialized_variables_1/VarIsInitializedOp_583" + input: "report_uninitialized_variables_1/VarIsInitializedOp_584" + input: "report_uninitialized_variables_1/VarIsInitializedOp_585" + input: "report_uninitialized_variables_1/VarIsInitializedOp_586" + input: "report_uninitialized_variables_1/VarIsInitializedOp_587" + input: "report_uninitialized_variables_1/VarIsInitializedOp_588" + input: "report_uninitialized_variables_1/VarIsInitializedOp_589" + input: "report_uninitialized_variables_1/VarIsInitializedOp_590" + input: "report_uninitialized_variables_1/VarIsInitializedOp_591" + input: "report_uninitialized_variables_1/VarIsInitializedOp_592" + input: "report_uninitialized_variables_1/VarIsInitializedOp_593" + input: "report_uninitialized_variables_1/VarIsInitializedOp_594" + input: "report_uninitialized_variables_1/VarIsInitializedOp_595" + input: "report_uninitialized_variables_1/VarIsInitializedOp_596" + input: "report_uninitialized_variables_1/VarIsInitializedOp_597" + input: "report_uninitialized_variables_1/VarIsInitializedOp_598" + input: "report_uninitialized_variables_1/VarIsInitializedOp_599" + device: "/device:CPU:0" + attr { + key: "N" + value { + i: 600 + } + } + attr { + key: "T" + value { + type: DT_BOOL + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables_1/LogicalNot" + op: "LogicalNot" + input: "report_uninitialized_variables_1/stack" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/Const" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 600 + } + } + string_val: "global_step" + string_val: "bert/embeddings/word_embeddings" + string_val: "bert/embeddings/token_type_embeddings" + string_val: "bert/embeddings/position_embeddings" + string_val: "bert/embeddings/layer_normalization/gamma" + string_val: "bert/embeddings/layer_normalization/beta" + string_val: "bert/encoder/layer_0/attention/self/query/kernel" + string_val: "bert/encoder/layer_0/attention/self/query/bias" + string_val: "bert/encoder/layer_0/attention/self/key/kernel" + string_val: "bert/encoder/layer_0/attention/self/key/bias" + string_val: "bert/encoder/layer_0/attention/self/value/kernel" + string_val: "bert/encoder/layer_0/attention/self/value/bias" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel" + string_val: "bert/encoder/layer_0/attention/output/dense/bias" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel" + string_val: "bert/encoder/layer_0/intermediate/dense/bias" + string_val: "bert/encoder/layer_0/output/dense/kernel" + string_val: "bert/encoder/layer_0/output/dense/bias" + string_val: 
"bert/encoder/layer_0/output/layer_normalization_2/gamma" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta" + string_val: "bert/encoder/layer_1/attention/self/query/kernel" + string_val: "bert/encoder/layer_1/attention/self/query/bias" + string_val: "bert/encoder/layer_1/attention/self/key/kernel" + string_val: "bert/encoder/layer_1/attention/self/key/bias" + string_val: "bert/encoder/layer_1/attention/self/value/kernel" + string_val: "bert/encoder/layer_1/attention/self/value/bias" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel" + string_val: "bert/encoder/layer_1/attention/output/dense/bias" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel" + string_val: "bert/encoder/layer_1/intermediate/dense/bias" + string_val: "bert/encoder/layer_1/output/dense/kernel" + string_val: "bert/encoder/layer_1/output/dense/bias" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta" + string_val: "bert/encoder/layer_2/attention/self/query/kernel" + string_val: "bert/encoder/layer_2/attention/self/query/bias" + string_val: "bert/encoder/layer_2/attention/self/key/kernel" + string_val: "bert/encoder/layer_2/attention/self/key/bias" + string_val: "bert/encoder/layer_2/attention/self/value/kernel" + string_val: "bert/encoder/layer_2/attention/self/value/bias" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel" + string_val: "bert/encoder/layer_2/attention/output/dense/bias" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel" + string_val: "bert/encoder/layer_2/intermediate/dense/bias" + string_val: "bert/encoder/layer_2/output/dense/kernel" + string_val: "bert/encoder/layer_2/output/dense/bias" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta" + string_val: "bert/encoder/layer_3/attention/self/query/kernel" + string_val: "bert/encoder/layer_3/attention/self/query/bias" + string_val: "bert/encoder/layer_3/attention/self/key/kernel" + string_val: "bert/encoder/layer_3/attention/self/key/bias" + string_val: "bert/encoder/layer_3/attention/self/value/kernel" + string_val: "bert/encoder/layer_3/attention/self/value/bias" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel" + string_val: "bert/encoder/layer_3/attention/output/dense/bias" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel" + string_val: "bert/encoder/layer_3/intermediate/dense/bias" + string_val: "bert/encoder/layer_3/output/dense/kernel" + string_val: "bert/encoder/layer_3/output/dense/bias" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta" + string_val: "bert/encoder/layer_4/attention/self/query/kernel" + string_val: "bert/encoder/layer_4/attention/self/query/bias" + string_val: "bert/encoder/layer_4/attention/self/key/kernel" + string_val: "bert/encoder/layer_4/attention/self/key/bias" + 
string_val: "bert/encoder/layer_4/attention/self/value/kernel" + string_val: "bert/encoder/layer_4/attention/self/value/bias" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel" + string_val: "bert/encoder/layer_4/attention/output/dense/bias" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel" + string_val: "bert/encoder/layer_4/intermediate/dense/bias" + string_val: "bert/encoder/layer_4/output/dense/kernel" + string_val: "bert/encoder/layer_4/output/dense/bias" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta" + string_val: "bert/encoder/layer_5/attention/self/query/kernel" + string_val: "bert/encoder/layer_5/attention/self/query/bias" + string_val: "bert/encoder/layer_5/attention/self/key/kernel" + string_val: "bert/encoder/layer_5/attention/self/key/bias" + string_val: "bert/encoder/layer_5/attention/self/value/kernel" + string_val: "bert/encoder/layer_5/attention/self/value/bias" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel" + string_val: "bert/encoder/layer_5/attention/output/dense/bias" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel" + string_val: "bert/encoder/layer_5/intermediate/dense/bias" + string_val: "bert/encoder/layer_5/output/dense/kernel" + string_val: "bert/encoder/layer_5/output/dense/bias" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta" + string_val: "bert/encoder/layer_6/attention/self/query/kernel" + string_val: "bert/encoder/layer_6/attention/self/query/bias" + string_val: "bert/encoder/layer_6/attention/self/key/kernel" + string_val: "bert/encoder/layer_6/attention/self/key/bias" + string_val: "bert/encoder/layer_6/attention/self/value/kernel" + string_val: "bert/encoder/layer_6/attention/self/value/bias" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel" + string_val: "bert/encoder/layer_6/attention/output/dense/bias" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel" + string_val: "bert/encoder/layer_6/intermediate/dense/bias" + string_val: "bert/encoder/layer_6/output/dense/kernel" + string_val: "bert/encoder/layer_6/output/dense/bias" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta" + string_val: "bert/encoder/layer_7/attention/self/query/kernel" + string_val: "bert/encoder/layer_7/attention/self/query/bias" + string_val: "bert/encoder/layer_7/attention/self/key/kernel" + string_val: "bert/encoder/layer_7/attention/self/key/bias" + string_val: "bert/encoder/layer_7/attention/self/value/kernel" + string_val: "bert/encoder/layer_7/attention/self/value/bias" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel" + string_val: "bert/encoder/layer_7/attention/output/dense/bias" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + string_val: 
"bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel" + string_val: "bert/encoder/layer_7/intermediate/dense/bias" + string_val: "bert/encoder/layer_7/output/dense/kernel" + string_val: "bert/encoder/layer_7/output/dense/bias" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta" + string_val: "bert/encoder/layer_8/attention/self/query/kernel" + string_val: "bert/encoder/layer_8/attention/self/query/bias" + string_val: "bert/encoder/layer_8/attention/self/key/kernel" + string_val: "bert/encoder/layer_8/attention/self/key/bias" + string_val: "bert/encoder/layer_8/attention/self/value/kernel" + string_val: "bert/encoder/layer_8/attention/self/value/bias" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel" + string_val: "bert/encoder/layer_8/attention/output/dense/bias" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel" + string_val: "bert/encoder/layer_8/intermediate/dense/bias" + string_val: "bert/encoder/layer_8/output/dense/kernel" + string_val: "bert/encoder/layer_8/output/dense/bias" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta" + string_val: "bert/encoder/layer_9/attention/self/query/kernel" + string_val: "bert/encoder/layer_9/attention/self/query/bias" + string_val: "bert/encoder/layer_9/attention/self/key/kernel" + string_val: "bert/encoder/layer_9/attention/self/key/bias" + string_val: "bert/encoder/layer_9/attention/self/value/kernel" + string_val: "bert/encoder/layer_9/attention/self/value/bias" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel" + string_val: "bert/encoder/layer_9/attention/output/dense/bias" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel" + string_val: "bert/encoder/layer_9/intermediate/dense/bias" + string_val: "bert/encoder/layer_9/output/dense/kernel" + string_val: "bert/encoder/layer_9/output/dense/bias" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta" + string_val: "bert/encoder/layer_10/attention/self/query/kernel" + string_val: "bert/encoder/layer_10/attention/self/query/bias" + string_val: "bert/encoder/layer_10/attention/self/key/kernel" + string_val: "bert/encoder/layer_10/attention/self/key/bias" + string_val: "bert/encoder/layer_10/attention/self/value/kernel" + string_val: "bert/encoder/layer_10/attention/self/value/bias" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel" + string_val: "bert/encoder/layer_10/attention/output/dense/bias" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel" + string_val: "bert/encoder/layer_10/intermediate/dense/bias" + string_val: "bert/encoder/layer_10/output/dense/kernel" + string_val: "bert/encoder/layer_10/output/dense/bias" + string_val: 
"bert/encoder/layer_10/output/layer_normalization_22/gamma" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta" + string_val: "bert/encoder/layer_11/attention/self/query/kernel" + string_val: "bert/encoder/layer_11/attention/self/query/bias" + string_val: "bert/encoder/layer_11/attention/self/key/kernel" + string_val: "bert/encoder/layer_11/attention/self/key/bias" + string_val: "bert/encoder/layer_11/attention/self/value/kernel" + string_val: "bert/encoder/layer_11/attention/self/value/bias" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel" + string_val: "bert/encoder/layer_11/attention/output/dense/bias" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel" + string_val: "bert/encoder/layer_11/intermediate/dense/bias" + string_val: "bert/encoder/layer_11/output/dense/kernel" + string_val: "bert/encoder/layer_11/output/dense/bias" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta" + string_val: "bert/pooler/dense/kernel" + string_val: "bert/pooler/dense/bias" + string_val: "cls/squad/output_weights" + string_val: "cls/squad/output_bias" + string_val: "bert/embeddings/word_embeddings/adam_m" + string_val: "bert/embeddings/word_embeddings/adam_v" + string_val: "bert/embeddings/token_type_embeddings/adam_m" + string_val: "bert/embeddings/token_type_embeddings/adam_v" + string_val: "bert/embeddings/position_embeddings/adam_m" + string_val: "bert/embeddings/position_embeddings/adam_v" + string_val: "bert/embeddings/layer_normalization/gamma/adam_m" + string_val: "bert/embeddings/layer_normalization/gamma/adam_v" + string_val: "bert/embeddings/layer_normalization/beta/adam_m" + string_val: "bert/embeddings/layer_normalization/beta/adam_v" + string_val: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + string_val: 
"bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + string_val: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + string_val: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + string_val: 
"bert/encoder/layer_2/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + string_val: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + string_val: 
"bert/encoder/layer_3/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + string_val: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + string_val: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + string_val: 
"bert/encoder/layer_5/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + string_val: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + string_val: 
"bert/encoder/layer_6/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + string_val: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + string_val: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + string_val: 
"bert/encoder/layer_8/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + string_val: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + string_val: 
"bert/encoder/layer_9/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + string_val: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + string_val: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + string_val: 
"bert/encoder/layer_11/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + string_val: "cls/squad/output_weights/adam_m" + string_val: "cls/squad/output_weights/adam_v" + string_val: "cls/squad/output_bias/adam_m" + string_val: "cls/squad/output_bias/adam_v" + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Shape" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 600 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice/stack" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice/stack_1" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice/stack_2" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + 
tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice" + op: "StridedSlice" + input: "report_uninitialized_variables_1/boolean_mask/Shape" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice/stack" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice/stack_1" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice/stack_2" + device: "/device:CPU:0" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Prod/reduction_indices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Prod" + op: "Prod" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice" + input: "report_uninitialized_variables_1/boolean_mask/Prod/reduction_indices" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "keep_dims" + value { + b: false + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Shape_1" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 600 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice_1/stack" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice_1/stack_1" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice_1/stack_2" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + 
tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice_1" + op: "StridedSlice" + input: "report_uninitialized_variables_1/boolean_mask/Shape_1" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice_1/stack" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice_1/stack_1" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice_1/stack_2" + device: "/device:CPU:0" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "begin_mask" + value { + i: 1 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 0 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Shape_2" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 600 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice_2/stack" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice_2/stack_1" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice_2/stack_2" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: 1 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/strided_slice_2" + op: "StridedSlice" + input: "report_uninitialized_variables_1/boolean_mask/Shape_2" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice_2/stack" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice_2/stack_1" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice_2/stack_2" + device: "/device:CPU:0" + attr { + key: "Index" + value { + type: DT_INT32 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "begin_mask" + value { + i: 0 + } + } + attr { + key: "ellipsis_mask" + value { + i: 0 + } + } + attr { + key: "end_mask" + value { + i: 1 + } + } + attr { + key: "new_axis_mask" + value { + i: 0 + } + } + attr { + key: "shrink_axis_mask" + 
value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/concat/values_1" + op: "Pack" + input: "report_uninitialized_variables_1/boolean_mask/Prod" + device: "/device:CPU:0" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/concat/axis" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/concat" + op: "ConcatV2" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice_1" + input: "report_uninitialized_variables_1/boolean_mask/concat/values_1" + input: "report_uninitialized_variables_1/boolean_mask/strided_slice_2" + input: "report_uninitialized_variables_1/boolean_mask/concat/axis" + device: "/device:CPU:0" + attr { + key: "N" + value { + i: 3 + } + } + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Reshape" + op: "Reshape" + input: "report_uninitialized_variables_1/Const" + input: "report_uninitialized_variables_1/boolean_mask/concat" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Reshape_1/shape" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 1 + } + } + int_val: -1 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Reshape_1" + op: "Reshape" + input: "report_uninitialized_variables_1/LogicalNot" + input: "report_uninitialized_variables_1/boolean_mask/Reshape_1/shape" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_BOOL + } + } + attr { + key: "Tshape" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Where" + op: "Where" + input: "report_uninitialized_variables_1/boolean_mask/Reshape_1" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_BOOL + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + dim { + size: 1 + } + } + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/Squeeze" + op: "Squeeze" + input: "report_uninitialized_variables_1/boolean_mask/Where" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_INT64 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + } + } + } + attr { + key: "squeeze_dims" + value { + list { + 
i: 1 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/GatherV2/axis" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "report_uninitialized_variables_1/boolean_mask/GatherV2" + op: "GatherV2" + input: "report_uninitialized_variables_1/boolean_mask/Reshape" + input: "report_uninitialized_variables_1/boolean_mask/Squeeze" + input: "report_uninitialized_variables_1/boolean_mask/GatherV2/axis" + device: "/device:CPU:0" + attr { + key: "Taxis" + value { + type: DT_INT32 + } + } + attr { + key: "Tindices" + value { + type: DT_INT64 + } + } + attr { + key: "Tparams" + value { + type: DT_STRING + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + } + } + } + attr { + key: "batch_dims" + value { + i: 0 + } + } +} +node { + name: "report_uninitialized_resources_1/Const" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + } + } + } + } + } +} +node { + name: "concat_1/axis" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "concat_1" + op: "ConcatV2" + input: "report_uninitialized_variables_1/boolean_mask/GatherV2" + input: "report_uninitialized_resources_1/Const" + input: "concat_1/axis" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: -1 + } + } + } + } + } +} +node { + name: "init_2" + op: "NoOp" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } +} +node { + name: "init_all_tables" + op: "NoOp" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } +} +node { + name: "init_3" + op: "NoOp" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } +} +node { + name: "group_deps_3" + op: "NoOp" + input: "^init_2" + input: "^init_3" + input: "^init_all_tables" +} +node { + name: "Merge/MergeSummary" + op: "MergeSummary" + input: "loss" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "save/filename/input" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "model" + } + } + } +} +node { + name: "save/filename" + op: "PlaceholderWithDefault" + input: "save/filename/input" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "save/Const" + op: "PlaceholderWithDefault" + input: "save/filename" + attr { + key: "_output_shapes" + 
value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "save/StaticRegexFullMatch" + op: "StaticRegexFullMatch" + input: "save/Const" + device: "/device:CPU:*" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "pattern" + value { + s: "^s3://.*" + } + } +} +node { + name: "save/Const_1" + op: "Const" + device: "/device:CPU:*" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: ".part" + } + } + } +} +node { + name: "save/Const_2" + op: "Const" + device: "/device:CPU:*" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "_temp/part" + } + } + } +} +node { + name: "save/Select" + op: "Select" + input: "save/StaticRegexFullMatch" + input: "save/Const_1" + input: "save/Const_2" + device: "/device:CPU:*" + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "save/StringJoin" + op: "StringJoin" + input: "save/Const" + input: "save/Select" + device: "/device:CPU:*" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "separator" + value { + s: "" + } + } +} +node { + name: "save/num_shards" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 1 + } + } + } +} +node { + name: "save/ShardedFilename/shard" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: 0 + } + } + } +} +node { + name: "save/ShardedFilename" + op: "ShardedFilename" + input: "save/StringJoin" + input: "save/ShardedFilename/shard" + input: "save/num_shards" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "save/SaveV2/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 600 + } + } + string_val: "bert/embeddings/layer_normalization/beta" + string_val: "bert/embeddings/layer_normalization/beta/adam_m" + string_val: "bert/embeddings/layer_normalization/beta/adam_v" + string_val: "bert/embeddings/layer_normalization/gamma" + string_val: "bert/embeddings/layer_normalization/gamma/adam_m" + string_val: "bert/embeddings/layer_normalization/gamma/adam_v" + string_val: "bert/embeddings/position_embeddings" + string_val: "bert/embeddings/position_embeddings/adam_m" + string_val: "bert/embeddings/position_embeddings/adam_v" + string_val: "bert/embeddings/token_type_embeddings" + string_val: 
"bert/embeddings/token_type_embeddings/adam_m" + string_val: "bert/embeddings/token_type_embeddings/adam_v" + string_val: "bert/embeddings/word_embeddings" + string_val: "bert/embeddings/word_embeddings/adam_m" + string_val: "bert/embeddings/word_embeddings/adam_v" + string_val: "bert/encoder/layer_0/attention/output/dense/bias" + string_val: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + string_val: "bert/encoder/layer_0/attention/self/key/bias" + string_val: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/key/kernel" + string_val: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/query/bias" + string_val: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/query/kernel" + string_val: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/value/bias" + string_val: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/value/kernel" + string_val: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_0/intermediate/dense/bias" + string_val: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/output/dense/bias" + string_val: "bert/encoder/layer_0/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/output/dense/kernel" + string_val: "bert/encoder/layer_0/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + string_val: 
"bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + string_val: "bert/encoder/layer_1/attention/output/dense/bias" + string_val: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + string_val: "bert/encoder/layer_1/attention/self/key/bias" + string_val: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/key/kernel" + string_val: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/query/bias" + string_val: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/query/kernel" + string_val: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/value/bias" + string_val: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/value/kernel" + string_val: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_1/intermediate/dense/bias" + string_val: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/output/dense/bias" + string_val: "bert/encoder/layer_1/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/output/dense/kernel" + string_val: "bert/encoder/layer_1/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + string_val: 
"bert/encoder/layer_10/attention/output/dense/bias" + string_val: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + string_val: "bert/encoder/layer_10/attention/self/key/bias" + string_val: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/key/kernel" + string_val: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/self/query/bias" + string_val: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/query/kernel" + string_val: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/self/value/bias" + string_val: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/value/kernel" + string_val: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_10/intermediate/dense/bias" + string_val: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/output/dense/bias" + string_val: "bert/encoder/layer_10/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/output/dense/kernel" + string_val: "bert/encoder/layer_10/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + string_val: "bert/encoder/layer_11/attention/output/dense/bias" + string_val: 
"bert/encoder/layer_11/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + string_val: "bert/encoder/layer_11/attention/self/key/bias" + string_val: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/self/key/kernel" + string_val: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/query/bias" + string_val: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/self/query/kernel" + string_val: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/value/bias" + string_val: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/self/value/kernel" + string_val: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_11/intermediate/dense/bias" + string_val: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/output/dense/bias" + string_val: "bert/encoder/layer_11/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/output/dense/kernel" + string_val: "bert/encoder/layer_11/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + string_val: "bert/encoder/layer_2/attention/output/dense/bias" + string_val: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + string_val: 
"bert/encoder/layer_2/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + string_val: "bert/encoder/layer_2/attention/self/key/bias" + string_val: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/key/kernel" + string_val: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/self/query/bias" + string_val: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/query/kernel" + string_val: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/self/value/bias" + string_val: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/value/kernel" + string_val: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_2/intermediate/dense/bias" + string_val: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/output/dense/bias" + string_val: "bert/encoder/layer_2/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/output/dense/kernel" + string_val: "bert/encoder/layer_2/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + string_val: "bert/encoder/layer_3/attention/output/dense/bias" + string_val: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel" + string_val: 
"bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + string_val: "bert/encoder/layer_3/attention/self/key/bias" + string_val: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/key/kernel" + string_val: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/query/bias" + string_val: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/query/kernel" + string_val: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/value/bias" + string_val: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/value/kernel" + string_val: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_3/intermediate/dense/bias" + string_val: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/output/dense/bias" + string_val: "bert/encoder/layer_3/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/output/dense/kernel" + string_val: "bert/encoder/layer_3/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + string_val: "bert/encoder/layer_4/attention/output/dense/bias" + string_val: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + string_val: 
"bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + string_val: "bert/encoder/layer_4/attention/self/key/bias" + string_val: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/self/key/kernel" + string_val: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/query/bias" + string_val: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/self/query/kernel" + string_val: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/value/bias" + string_val: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/self/value/kernel" + string_val: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_4/intermediate/dense/bias" + string_val: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/output/dense/bias" + string_val: "bert/encoder/layer_4/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/output/dense/kernel" + string_val: "bert/encoder/layer_4/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + string_val: "bert/encoder/layer_5/attention/output/dense/bias" + string_val: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + 
string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + string_val: "bert/encoder/layer_5/attention/self/key/bias" + string_val: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/key/kernel" + string_val: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/query/bias" + string_val: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/query/kernel" + string_val: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/value/bias" + string_val: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/value/kernel" + string_val: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_5/intermediate/dense/bias" + string_val: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/output/dense/bias" + string_val: "bert/encoder/layer_5/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/output/dense/kernel" + string_val: "bert/encoder/layer_5/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + string_val: "bert/encoder/layer_6/attention/output/dense/bias" + string_val: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + string_val: 
"bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + string_val: "bert/encoder/layer_6/attention/self/key/bias" + string_val: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/key/kernel" + string_val: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/query/bias" + string_val: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/query/kernel" + string_val: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/value/bias" + string_val: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/value/kernel" + string_val: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_6/intermediate/dense/bias" + string_val: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/output/dense/bias" + string_val: "bert/encoder/layer_6/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/output/dense/kernel" + string_val: "bert/encoder/layer_6/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + string_val: "bert/encoder/layer_7/attention/output/dense/bias" + string_val: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + string_val: 
"bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + string_val: "bert/encoder/layer_7/attention/self/key/bias" + string_val: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/key/kernel" + string_val: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/query/bias" + string_val: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/query/kernel" + string_val: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/value/bias" + string_val: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/value/kernel" + string_val: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_7/intermediate/dense/bias" + string_val: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/output/dense/bias" + string_val: "bert/encoder/layer_7/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/output/dense/kernel" + string_val: "bert/encoder/layer_7/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + string_val: "bert/encoder/layer_8/attention/output/dense/bias" + string_val: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + string_val: 
"bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + string_val: "bert/encoder/layer_8/attention/self/key/bias" + string_val: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/self/key/kernel" + string_val: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/query/bias" + string_val: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/self/query/kernel" + string_val: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/value/bias" + string_val: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/self/value/kernel" + string_val: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_8/intermediate/dense/bias" + string_val: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/output/dense/bias" + string_val: "bert/encoder/layer_8/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/output/dense/kernel" + string_val: "bert/encoder/layer_8/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + string_val: "bert/encoder/layer_9/attention/output/dense/bias" + string_val: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + string_val: "bert/encoder/layer_9/attention/self/key/bias" + string_val: 
"bert/encoder/layer_9/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/key/kernel" + string_val: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/query/bias" + string_val: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/query/kernel" + string_val: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/value/bias" + string_val: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/value/kernel" + string_val: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_9/intermediate/dense/bias" + string_val: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/output/dense/bias" + string_val: "bert/encoder/layer_9/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/output/dense/kernel" + string_val: "bert/encoder/layer_9/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + string_val: "bert/pooler/dense/bias" + string_val: "bert/pooler/dense/kernel" + string_val: "cls/squad/output_bias" + string_val: "cls/squad/output_bias/adam_m" + string_val: "cls/squad/output_bias/adam_v" + string_val: "cls/squad/output_weights" + string_val: "cls/squad/output_weights/adam_m" + string_val: "cls/squad/output_weights/adam_v" + string_val: "global_step" + } + } + } +} +node { + name: "save/SaveV2/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 600 + } + } + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + 
string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + 
string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + 
string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + } + } + } +} +node { + name: "save/SaveV2" + op: "SaveV2" + input: "save/ShardedFilename" + input: "save/SaveV2/tensor_names" + input: "save/SaveV2/shape_and_slices" + input: "bert/embeddings/layer_normalization/beta/Read/ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_m/Read/ReadVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_v/Read/ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma/Read/ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_m/Read/ReadVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v/Read/ReadVariableOp" + input: "bert/embeddings/position_embeddings/Read/ReadVariableOp" + input: "bert/embeddings/position_embeddings/adam_m/Read/ReadVariableOp" + input: "bert/embeddings/position_embeddings/adam_v/Read/ReadVariableOp" + input: 
"bert/embeddings/token_type_embeddings/Read/ReadVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_m/Read/ReadVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_v/Read/ReadVariableOp" + input: "bert/embeddings/word_embeddings/Read/ReadVariableOp" + input: "bert/embeddings/word_embeddings/adam_m/Read/ReadVariableOp" + input: "bert/embeddings/word_embeddings/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: 
"bert/encoder/layer_0/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: 
"bert/encoder/layer_1/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/Read/ReadVariableOp" + input: 
"bert/encoder/layer_10/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: 
"bert/encoder/layer_11/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/Read/ReadVariableOp" + input: 
"bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v/Read/ReadVariableOp" + input: 
"bert/encoder/layer_3/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v/Read/ReadVariableOp" + 
input: "bert/encoder/layer_3/output/layer_normalization_8/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/Read/ReadVariableOp" + input: 
"bert/encoder/layer_4/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: 
"bert/encoder/layer_5/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: 
"bert/encoder/layer_6/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/Read/ReadVariableOp" + input: 
"bert/encoder/layer_7/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/Read/ReadVariableOp" + input: 
"bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v/Read/ReadVariableOp" + input: 
"bert/encoder/layer_9/attention/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m/Read/ReadVariableOp" + input: 
"bert/encoder/layer_9/output/dense/kernel/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m/Read/ReadVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v/Read/ReadVariableOp" + input: "bert/pooler/dense/bias/Read/ReadVariableOp" + input: "bert/pooler/dense/kernel/Read/ReadVariableOp" + input: "cls/squad/output_bias/Read/ReadVariableOp" + input: "cls/squad/output_bias/adam_m/Read/ReadVariableOp" + input: "cls/squad/output_bias/adam_v/Read/ReadVariableOp" + input: "cls/squad/output_weights/Read/ReadVariableOp" + input: "cls/squad/output_weights/adam_m/Read/ReadVariableOp" + input: "cls/squad/output_weights/adam_v/Read/ReadVariableOp" + input: "global_step/Read/ReadVariableOp" + device: "/device:CPU:0" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: 
DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: 
DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: 
DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_INT64 + } + } + } +} +node { + name: "save/control_dependency" + op: "Identity" + input: "save/ShardedFilename" + input: "^save/SaveV2" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "_class" + value { + list { + s: "loc:@save/ShardedFilename" + } + } + } + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "save/MergeV2Checkpoints/checkpoint_prefixes" + op: "Pack" + input: "save/ShardedFilename" + input: "^save/control_dependency" + device: "/device:CPU:0" + attr { + key: "N" + value { + i: 1 + } + } + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 1 + } + } + } + } + } + attr { + key: "axis" + value { + i: 0 + } + } +} +node { + name: "save/MergeV2Checkpoints" + op: "MergeV2Checkpoints" + input: "save/MergeV2Checkpoints/checkpoint_prefixes" + input: "save/Const" + device: "/device:CPU:0" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "allow_missing_files" + value { + b: false + } + } + attr { + key: "delete_old_dirs" + value { + b: true + } + } +} +node { + name: "save/Identity" + op: "Identity" + input: "save/Const" + input: "^save/MergeV2Checkpoints" + input: "^save/control_dependency" + device: "/device:CPU:0" + attr { + key: "T" + value { + type: DT_STRING + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } +} +node { + name: "save/RestoreV2/tensor_names" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 600 + } + } + string_val: "bert/embeddings/layer_normalization/beta" + string_val: "bert/embeddings/layer_normalization/beta/adam_m" + string_val: "bert/embeddings/layer_normalization/beta/adam_v" + string_val: "bert/embeddings/layer_normalization/gamma" + string_val: "bert/embeddings/layer_normalization/gamma/adam_m" + string_val: "bert/embeddings/layer_normalization/gamma/adam_v" + string_val: "bert/embeddings/position_embeddings" + string_val: "bert/embeddings/position_embeddings/adam_m" + string_val: "bert/embeddings/position_embeddings/adam_v" + string_val: "bert/embeddings/token_type_embeddings" + string_val: "bert/embeddings/token_type_embeddings/adam_m" + string_val: "bert/embeddings/token_type_embeddings/adam_v" + string_val: "bert/embeddings/word_embeddings" + string_val: 
"bert/embeddings/word_embeddings/adam_m" + string_val: "bert/embeddings/word_embeddings/adam_v" + string_val: "bert/encoder/layer_0/attention/output/dense/bias" + string_val: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + string_val: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + string_val: "bert/encoder/layer_0/attention/self/key/bias" + string_val: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/key/kernel" + string_val: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/query/bias" + string_val: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/query/kernel" + string_val: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_0/attention/self/value/bias" + string_val: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_0/attention/self/value/kernel" + string_val: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_0/intermediate/dense/bias" + string_val: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/output/dense/bias" + string_val: "bert/encoder/layer_0/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_0/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_0/output/dense/kernel" + string_val: "bert/encoder/layer_0/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_0/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + string_val: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + string_val: "bert/encoder/layer_1/attention/output/dense/bias" + 
string_val: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + string_val: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + string_val: "bert/encoder/layer_1/attention/self/key/bias" + string_val: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/key/kernel" + string_val: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/query/bias" + string_val: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/query/kernel" + string_val: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_1/attention/self/value/bias" + string_val: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_1/attention/self/value/kernel" + string_val: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_1/intermediate/dense/bias" + string_val: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/output/dense/bias" + string_val: "bert/encoder/layer_1/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_1/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_1/output/dense/kernel" + string_val: "bert/encoder/layer_1/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_1/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + string_val: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + string_val: "bert/encoder/layer_10/attention/output/dense/bias" + string_val: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + string_val: 
"bert/encoder/layer_10/attention/output/dense/kernel" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + string_val: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + string_val: "bert/encoder/layer_10/attention/self/key/bias" + string_val: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/key/kernel" + string_val: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/self/query/bias" + string_val: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/query/kernel" + string_val: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_10/attention/self/value/bias" + string_val: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_10/attention/self/value/kernel" + string_val: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_10/intermediate/dense/bias" + string_val: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/output/dense/bias" + string_val: "bert/encoder/layer_10/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_10/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_10/output/dense/kernel" + string_val: "bert/encoder/layer_10/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_10/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + string_val: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + string_val: "bert/encoder/layer_11/attention/output/dense/bias" + string_val: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel" + string_val: 
"bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + string_val: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + string_val: "bert/encoder/layer_11/attention/self/key/bias" + string_val: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/self/key/kernel" + string_val: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/query/bias" + string_val: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/self/query/kernel" + string_val: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_11/attention/self/value/bias" + string_val: "bert/encoder/layer_11/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_11/attention/self/value/kernel" + string_val: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_11/intermediate/dense/bias" + string_val: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/output/dense/bias" + string_val: "bert/encoder/layer_11/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_11/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_11/output/dense/kernel" + string_val: "bert/encoder/layer_11/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_11/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + string_val: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + string_val: "bert/encoder/layer_2/attention/output/dense/bias" + string_val: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel" + string_val: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + string_val: 
"bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + string_val: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + string_val: "bert/encoder/layer_2/attention/self/key/bias" + string_val: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/key/kernel" + string_val: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/self/query/bias" + string_val: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/query/kernel" + string_val: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_2/attention/self/value/bias" + string_val: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_2/attention/self/value/kernel" + string_val: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_2/intermediate/dense/bias" + string_val: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/output/dense/bias" + string_val: "bert/encoder/layer_2/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_2/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_2/output/dense/kernel" + string_val: "bert/encoder/layer_2/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_2/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + string_val: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + string_val: "bert/encoder/layer_3/attention/output/dense/bias" + string_val: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + string_val: 
"bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + string_val: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + string_val: "bert/encoder/layer_3/attention/self/key/bias" + string_val: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/key/kernel" + string_val: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/query/bias" + string_val: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/query/kernel" + string_val: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_3/attention/self/value/bias" + string_val: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_3/attention/self/value/kernel" + string_val: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_3/intermediate/dense/bias" + string_val: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/output/dense/bias" + string_val: "bert/encoder/layer_3/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_3/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_3/output/dense/kernel" + string_val: "bert/encoder/layer_3/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_3/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + string_val: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + string_val: "bert/encoder/layer_4/attention/output/dense/bias" + string_val: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + 
string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + string_val: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + string_val: "bert/encoder/layer_4/attention/self/key/bias" + string_val: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/self/key/kernel" + string_val: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/query/bias" + string_val: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/self/query/kernel" + string_val: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_4/attention/self/value/bias" + string_val: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_4/attention/self/value/kernel" + string_val: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_4/intermediate/dense/bias" + string_val: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/output/dense/bias" + string_val: "bert/encoder/layer_4/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_4/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_4/output/dense/kernel" + string_val: "bert/encoder/layer_4/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_4/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + string_val: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + string_val: "bert/encoder/layer_5/attention/output/dense/bias" + string_val: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + string_val: 
"bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + string_val: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + string_val: "bert/encoder/layer_5/attention/self/key/bias" + string_val: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/key/kernel" + string_val: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/query/bias" + string_val: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/query/kernel" + string_val: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_5/attention/self/value/bias" + string_val: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_5/attention/self/value/kernel" + string_val: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_5/intermediate/dense/bias" + string_val: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/output/dense/bias" + string_val: "bert/encoder/layer_5/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_5/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_5/output/dense/kernel" + string_val: "bert/encoder/layer_5/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_5/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + string_val: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + string_val: "bert/encoder/layer_6/attention/output/dense/bias" + string_val: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + string_val: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + string_val: 
"bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + string_val: "bert/encoder/layer_6/attention/self/key/bias" + string_val: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/key/kernel" + string_val: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/query/bias" + string_val: "bert/encoder/layer_6/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/query/kernel" + string_val: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_6/attention/self/value/bias" + string_val: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_6/attention/self/value/kernel" + string_val: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_6/intermediate/dense/bias" + string_val: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/output/dense/bias" + string_val: "bert/encoder/layer_6/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_6/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_6/output/dense/kernel" + string_val: "bert/encoder/layer_6/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_6/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + string_val: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + string_val: "bert/encoder/layer_7/attention/output/dense/bias" + string_val: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + string_val: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + string_val: "bert/encoder/layer_7/attention/self/key/bias" + string_val: 
"bert/encoder/layer_7/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/key/kernel" + string_val: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/query/bias" + string_val: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/query/kernel" + string_val: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_7/attention/self/value/bias" + string_val: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_7/attention/self/value/kernel" + string_val: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_7/intermediate/dense/bias" + string_val: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/output/dense/bias" + string_val: "bert/encoder/layer_7/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_7/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_7/output/dense/kernel" + string_val: "bert/encoder/layer_7/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_7/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + string_val: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + string_val: "bert/encoder/layer_8/attention/output/dense/bias" + string_val: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + string_val: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + string_val: "bert/encoder/layer_8/attention/self/key/bias" + string_val: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + string_val: 
"bert/encoder/layer_8/attention/self/key/kernel" + string_val: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/query/bias" + string_val: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/self/query/kernel" + string_val: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_8/attention/self/value/bias" + string_val: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_8/attention/self/value/kernel" + string_val: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_8/intermediate/dense/bias" + string_val: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/output/dense/bias" + string_val: "bert/encoder/layer_8/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_8/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_8/output/dense/kernel" + string_val: "bert/encoder/layer_8/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_8/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + string_val: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + string_val: "bert/encoder/layer_9/attention/output/dense/bias" + string_val: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + string_val: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + string_val: "bert/encoder/layer_9/attention/self/key/bias" + string_val: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/key/kernel" + string_val: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + string_val: 
"bert/encoder/layer_9/attention/self/key/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/query/bias" + string_val: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/query/kernel" + string_val: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + string_val: "bert/encoder/layer_9/attention/self/value/bias" + string_val: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + string_val: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + string_val: "bert/encoder/layer_9/attention/self/value/kernel" + string_val: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + string_val: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + string_val: "bert/encoder/layer_9/intermediate/dense/bias" + string_val: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/output/dense/bias" + string_val: "bert/encoder/layer_9/output/dense/bias/adam_m" + string_val: "bert/encoder/layer_9/output/dense/bias/adam_v" + string_val: "bert/encoder/layer_9/output/dense/kernel" + string_val: "bert/encoder/layer_9/output/dense/kernel/adam_m" + string_val: "bert/encoder/layer_9/output/dense/kernel/adam_v" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + string_val: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + string_val: "bert/pooler/dense/bias" + string_val: "bert/pooler/dense/kernel" + string_val: "cls/squad/output_bias" + string_val: "cls/squad/output_bias/adam_m" + string_val: "cls/squad/output_bias/adam_v" + string_val: "cls/squad/output_weights" + string_val: "cls/squad/output_weights/adam_m" + string_val: "cls/squad/output_weights/adam_v" + string_val: "global_step" + } + } + } +} +node { + name: "save/RestoreV2/shape_and_slices" + op: "Const" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 600 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 600 + } + } + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + 
string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + 
string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + 
string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + string_val: "" + } + } + } +} +node { + name: "save/RestoreV2" + op: "RestoreV2" + input: "save/Const" + input: "save/RestoreV2/tensor_names" + input: "save/RestoreV2/shape_and_slices" + device: "/device:CPU:0" + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + 
shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + 
unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true 
+ } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + 
unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true 
+ } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + 
unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + shape { + unknown_rank: true + } + } + } + } + attr { + key: "dtypes" + value { + list { + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: 
DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: 
DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: 
DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_FLOAT + type: DT_INT64 + } + } + } +} +node { + name: "save/Identity_1" + op: "Identity" + input: "save/RestoreV2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/beta" + input: "save/Identity_1" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_2" + op: "Identity" + input: "save/RestoreV2:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_1" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_m" + input: "save/Identity_2" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_3" + op: "Identity" + input: "save/RestoreV2:2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_2" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/beta/adam_v" + input: "save/Identity_3" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_4" + op: "Identity" + input: "save/RestoreV2:3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_3" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/gamma" + input: "save/Identity_4" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_5" + op: "Identity" + input: "save/RestoreV2:4" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_4" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_m" + input: "save/Identity_5" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_6" + op: "Identity" + input: "save/RestoreV2:5" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_5" + op: "AssignVariableOp" + input: "bert/embeddings/layer_normalization/gamma/adam_v" + input: "save/Identity_6" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_7" + op: "Identity" + input: "save/RestoreV2:6" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_6" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings" + input: "save/Identity_7" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_8" + op: "Identity" + input: "save/RestoreV2:7" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_7" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings/adam_m" + input: "save/Identity_8" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_9" + op: "Identity" + input: "save/RestoreV2:8" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_8" + op: "AssignVariableOp" + input: "bert/embeddings/position_embeddings/adam_v" + input: "save/Identity_9" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_10" + op: "Identity" + input: "save/RestoreV2:9" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_9" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings" + input: "save/Identity_10" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_11" + op: "Identity" + input: "save/RestoreV2:10" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_10" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_m" + input: "save/Identity_11" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_12" + op: "Identity" + input: "save/RestoreV2:11" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_11" + op: "AssignVariableOp" + input: "bert/embeddings/token_type_embeddings/adam_v" + input: "save/Identity_12" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_13" + op: "Identity" + input: "save/RestoreV2:12" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_12" + op: "AssignVariableOp" + input: "bert/embeddings/word_embeddings" + input: "save/Identity_13" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_14" + op: "Identity" + input: "save/RestoreV2:13" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_13" + op: "AssignVariableOp" + input: "bert/embeddings/word_embeddings/adam_m" + input: "save/Identity_14" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_15" + op: "Identity" + input: "save/RestoreV2:14" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_14" + op: "AssignVariableOp" + input: "bert/embeddings/word_embeddings/adam_v" + input: "save/Identity_15" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_16" + op: "Identity" + input: "save/RestoreV2:15" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_15" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias" + input: "save/Identity_16" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_17" + op: "Identity" + input: "save/RestoreV2:16" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_16" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_m" + input: "save/Identity_17" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_18" + op: "Identity" + input: "save/RestoreV2:17" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_17" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/bias/adam_v" + input: "save/Identity_18" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_19" + op: "Identity" + input: "save/RestoreV2:18" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_18" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel" + input: "save/Identity_19" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_20" + op: "Identity" + input: "save/RestoreV2:19" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_19" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/dense/kernel/adam_m" + input: "save/Identity_20" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_21" + op: "Identity" + input: "save/RestoreV2:20" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_20" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_0/attention/output/dense/kernel/adam_v" + input: "save/Identity_21" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_22" + op: "Identity" + input: "save/RestoreV2:21" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_21" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta" + input: "save/Identity_22" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_23" + op: "Identity" + input: "save/RestoreV2:22" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_22" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_m" + input: "save/Identity_23" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_24" + op: "Identity" + input: "save/RestoreV2:23" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_23" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/beta/adam_v" + input: "save/Identity_24" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_25" + op: "Identity" + input: "save/RestoreV2:24" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_24" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma" + input: "save/Identity_25" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_26" + op: "Identity" + input: "save/RestoreV2:25" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_25" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_m" + input: "save/Identity_26" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_27" + op: "Identity" + input: "save/RestoreV2:26" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + 
value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_26" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/output/layer_normalization_1/gamma/adam_v" + input: "save/Identity_27" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_28" + op: "Identity" + input: "save/RestoreV2:27" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_27" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias" + input: "save/Identity_28" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_29" + op: "Identity" + input: "save/RestoreV2:28" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_28" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_m" + input: "save/Identity_29" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_30" + op: "Identity" + input: "save/RestoreV2:29" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_29" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/bias/adam_v" + input: "save/Identity_30" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_31" + op: "Identity" + input: "save/RestoreV2:30" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_30" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel" + input: "save/Identity_31" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_32" + op: "Identity" + input: "save/RestoreV2:31" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_31" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_m" + input: "save/Identity_32" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_33" + op: "Identity" + input: "save/RestoreV2:32" + attr { + key: "T" + value { + 
type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_32" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/key/kernel/adam_v" + input: "save/Identity_33" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_34" + op: "Identity" + input: "save/RestoreV2:33" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_33" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias" + input: "save/Identity_34" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_35" + op: "Identity" + input: "save/RestoreV2:34" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_34" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_m" + input: "save/Identity_35" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_36" + op: "Identity" + input: "save/RestoreV2:35" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_35" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/bias/adam_v" + input: "save/Identity_36" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_37" + op: "Identity" + input: "save/RestoreV2:36" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_36" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel" + input: "save/Identity_37" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_38" + op: "Identity" + input: "save/RestoreV2:37" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_37" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_m" + input: "save/Identity_38" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_39" + op: "Identity" + input: 
"save/RestoreV2:38" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_38" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/query/kernel/adam_v" + input: "save/Identity_39" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_40" + op: "Identity" + input: "save/RestoreV2:39" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_39" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias" + input: "save/Identity_40" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_41" + op: "Identity" + input: "save/RestoreV2:40" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_40" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_m" + input: "save/Identity_41" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_42" + op: "Identity" + input: "save/RestoreV2:41" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_41" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/bias/adam_v" + input: "save/Identity_42" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_43" + op: "Identity" + input: "save/RestoreV2:42" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_42" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel" + input: "save/Identity_43" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_44" + op: "Identity" + input: "save/RestoreV2:43" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_43" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_m" + input: "save/Identity_44" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + 
name: "save/Identity_45" + op: "Identity" + input: "save/RestoreV2:44" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_44" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/attention/self/value/kernel/adam_v" + input: "save/Identity_45" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_46" + op: "Identity" + input: "save/RestoreV2:45" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_45" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias" + input: "save/Identity_46" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_47" + op: "Identity" + input: "save/RestoreV2:46" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_46" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_m" + input: "save/Identity_47" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_48" + op: "Identity" + input: "save/RestoreV2:47" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_47" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/bias/adam_v" + input: "save/Identity_48" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_49" + op: "Identity" + input: "save/RestoreV2:48" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_48" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel" + input: "save/Identity_49" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_50" + op: "Identity" + input: "save/RestoreV2:49" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_49" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_m" + input: "save/Identity_50" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + 
value { + b: false + } + } +} +node { + name: "save/Identity_51" + op: "Identity" + input: "save/RestoreV2:50" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_50" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/intermediate/dense/kernel/adam_v" + input: "save/Identity_51" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_52" + op: "Identity" + input: "save/RestoreV2:51" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_51" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias" + input: "save/Identity_52" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_53" + op: "Identity" + input: "save/RestoreV2:52" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_52" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_m" + input: "save/Identity_53" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_54" + op: "Identity" + input: "save/RestoreV2:53" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_53" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/bias/adam_v" + input: "save/Identity_54" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_55" + op: "Identity" + input: "save/RestoreV2:54" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_54" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel" + input: "save/Identity_55" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_56" + op: "Identity" + input: "save/RestoreV2:55" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_55" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_m" + input: "save/Identity_56" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_57" + op: "Identity" + input: "save/RestoreV2:56" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_56" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/dense/kernel/adam_v" + input: "save/Identity_57" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_58" + op: "Identity" + input: "save/RestoreV2:57" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_57" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta" + input: "save/Identity_58" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_59" + op: "Identity" + input: "save/RestoreV2:58" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_58" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_m" + input: "save/Identity_59" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_60" + op: "Identity" + input: "save/RestoreV2:59" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_59" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/beta/adam_v" + input: "save/Identity_60" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_61" + op: "Identity" + input: "save/RestoreV2:60" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_60" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma" + input: "save/Identity_61" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_62" + op: "Identity" + input: "save/RestoreV2:61" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_61" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_m" + input: "save/Identity_62" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } 
+ } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_63" + op: "Identity" + input: "save/RestoreV2:62" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_62" + op: "AssignVariableOp" + input: "bert/encoder/layer_0/output/layer_normalization_2/gamma/adam_v" + input: "save/Identity_63" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_64" + op: "Identity" + input: "save/RestoreV2:63" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_63" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias" + input: "save/Identity_64" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_65" + op: "Identity" + input: "save/RestoreV2:64" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_64" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_m" + input: "save/Identity_65" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_66" + op: "Identity" + input: "save/RestoreV2:65" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_65" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/bias/adam_v" + input: "save/Identity_66" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_67" + op: "Identity" + input: "save/RestoreV2:66" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_66" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel" + input: "save/Identity_67" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_68" + op: "Identity" + input: "save/RestoreV2:67" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_67" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_m" + input: "save/Identity_68" + attr { 
+ key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_69" + op: "Identity" + input: "save/RestoreV2:68" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_68" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/dense/kernel/adam_v" + input: "save/Identity_69" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_70" + op: "Identity" + input: "save/RestoreV2:69" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_69" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta" + input: "save/Identity_70" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_71" + op: "Identity" + input: "save/RestoreV2:70" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_70" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_m" + input: "save/Identity_71" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_72" + op: "Identity" + input: "save/RestoreV2:71" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_71" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/beta/adam_v" + input: "save/Identity_72" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_73" + op: "Identity" + input: "save/RestoreV2:72" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_72" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma" + input: "save/Identity_73" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_74" + op: "Identity" + input: "save/RestoreV2:73" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_73" + op: 
"AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_m" + input: "save/Identity_74" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_75" + op: "Identity" + input: "save/RestoreV2:74" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_74" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/output/layer_normalization_3/gamma/adam_v" + input: "save/Identity_75" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_76" + op: "Identity" + input: "save/RestoreV2:75" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_75" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias" + input: "save/Identity_76" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_77" + op: "Identity" + input: "save/RestoreV2:76" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_76" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_m" + input: "save/Identity_77" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_78" + op: "Identity" + input: "save/RestoreV2:77" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_77" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/bias/adam_v" + input: "save/Identity_78" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_79" + op: "Identity" + input: "save/RestoreV2:78" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_78" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel" + input: "save/Identity_79" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_80" + op: "Identity" + input: "save/RestoreV2:79" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_79" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_m" + input: "save/Identity_80" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_81" + op: "Identity" + input: "save/RestoreV2:80" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_80" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/key/kernel/adam_v" + input: "save/Identity_81" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_82" + op: "Identity" + input: "save/RestoreV2:81" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_81" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias" + input: "save/Identity_82" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_83" + op: "Identity" + input: "save/RestoreV2:82" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_82" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_m" + input: "save/Identity_83" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_84" + op: "Identity" + input: "save/RestoreV2:83" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_83" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/bias/adam_v" + input: "save/Identity_84" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_85" + op: "Identity" + input: "save/RestoreV2:84" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_84" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel" + input: "save/Identity_85" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_86" + op: "Identity" + input: "save/RestoreV2:85" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_85" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_m" + input: "save/Identity_86" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_87" + op: "Identity" + input: "save/RestoreV2:86" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_86" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/query/kernel/adam_v" + input: "save/Identity_87" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_88" + op: "Identity" + input: "save/RestoreV2:87" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_87" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias" + input: "save/Identity_88" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_89" + op: "Identity" + input: "save/RestoreV2:88" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_88" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_m" + input: "save/Identity_89" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_90" + op: "Identity" + input: "save/RestoreV2:89" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_89" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/bias/adam_v" + input: "save/Identity_90" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_91" + op: "Identity" + input: "save/RestoreV2:90" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_90" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel" + input: "save/Identity_91" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_92" + op: "Identity" + input: "save/RestoreV2:91" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_91" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_m" + input: "save/Identity_92" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_93" + op: "Identity" + input: "save/RestoreV2:92" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_92" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/attention/self/value/kernel/adam_v" + input: "save/Identity_93" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_94" + op: "Identity" + input: "save/RestoreV2:93" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_93" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias" + input: "save/Identity_94" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_95" + op: "Identity" + input: "save/RestoreV2:94" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_94" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_m" + input: "save/Identity_95" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_96" + op: "Identity" + input: "save/RestoreV2:95" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_95" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/bias/adam_v" + input: "save/Identity_96" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_97" + op: "Identity" + input: "save/RestoreV2:96" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_96" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel" + input: "save/Identity_97" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_98" + op: "Identity" + input: 
"save/RestoreV2:97" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_97" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_m" + input: "save/Identity_98" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_99" + op: "Identity" + input: "save/RestoreV2:98" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_98" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/intermediate/dense/kernel/adam_v" + input: "save/Identity_99" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_100" + op: "Identity" + input: "save/RestoreV2:99" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_99" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias" + input: "save/Identity_100" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_101" + op: "Identity" + input: "save/RestoreV2:100" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_100" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_m" + input: "save/Identity_101" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_102" + op: "Identity" + input: "save/RestoreV2:101" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_101" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/bias/adam_v" + input: "save/Identity_102" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_103" + op: "Identity" + input: "save/RestoreV2:102" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_102" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel" + input: "save/Identity_103" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"save/Identity_104" + op: "Identity" + input: "save/RestoreV2:103" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_103" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_m" + input: "save/Identity_104" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_105" + op: "Identity" + input: "save/RestoreV2:104" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_104" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/dense/kernel/adam_v" + input: "save/Identity_105" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_106" + op: "Identity" + input: "save/RestoreV2:105" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_105" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta" + input: "save/Identity_106" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_107" + op: "Identity" + input: "save/RestoreV2:106" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_106" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_m" + input: "save/Identity_107" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_108" + op: "Identity" + input: "save/RestoreV2:107" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_107" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/beta/adam_v" + input: "save/Identity_108" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_109" + op: "Identity" + input: "save/RestoreV2:108" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_108" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma" + input: "save/Identity_109" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT 
+ } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_110" + op: "Identity" + input: "save/RestoreV2:109" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_109" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_m" + input: "save/Identity_110" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_111" + op: "Identity" + input: "save/RestoreV2:110" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_110" + op: "AssignVariableOp" + input: "bert/encoder/layer_1/output/layer_normalization_4/gamma/adam_v" + input: "save/Identity_111" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_112" + op: "Identity" + input: "save/RestoreV2:111" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_111" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias" + input: "save/Identity_112" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_113" + op: "Identity" + input: "save/RestoreV2:112" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_112" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_m" + input: "save/Identity_113" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_114" + op: "Identity" + input: "save/RestoreV2:113" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_113" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/bias/adam_v" + input: "save/Identity_114" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_115" + op: "Identity" + input: "save/RestoreV2:114" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_114" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel" + input: "save/Identity_115" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_116" + op: "Identity" + input: "save/RestoreV2:115" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_115" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_m" + input: "save/Identity_116" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_117" + op: "Identity" + input: "save/RestoreV2:116" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_116" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/dense/kernel/adam_v" + input: "save/Identity_117" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_118" + op: "Identity" + input: "save/RestoreV2:117" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_117" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta" + input: "save/Identity_118" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_119" + op: "Identity" + input: "save/RestoreV2:118" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_118" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_m" + input: "save/Identity_119" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_120" + op: "Identity" + input: "save/RestoreV2:119" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_119" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/beta/adam_v" + input: "save/Identity_120" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_121" + op: "Identity" + input: "save/RestoreV2:120" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: 
"save/AssignVariableOp_120" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma" + input: "save/Identity_121" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_122" + op: "Identity" + input: "save/RestoreV2:121" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_121" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_m" + input: "save/Identity_122" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_123" + op: "Identity" + input: "save/RestoreV2:122" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_122" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/output/layer_normalization_21/gamma/adam_v" + input: "save/Identity_123" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_124" + op: "Identity" + input: "save/RestoreV2:123" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_123" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias" + input: "save/Identity_124" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_125" + op: "Identity" + input: "save/RestoreV2:124" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_124" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_m" + input: "save/Identity_125" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_126" + op: "Identity" + input: "save/RestoreV2:125" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_125" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/bias/adam_v" + input: "save/Identity_126" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_127" + op: "Identity" + input: "save/RestoreV2:126" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_126" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel" + input: "save/Identity_127" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_128" + op: "Identity" + input: "save/RestoreV2:127" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_127" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_m" + input: "save/Identity_128" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_129" + op: "Identity" + input: "save/RestoreV2:128" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_128" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/key/kernel/adam_v" + input: "save/Identity_129" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_130" + op: "Identity" + input: "save/RestoreV2:129" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_129" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias" + input: "save/Identity_130" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_131" + op: "Identity" + input: "save/RestoreV2:130" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_130" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_m" + input: "save/Identity_131" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_132" + op: "Identity" + input: "save/RestoreV2:131" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_131" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/bias/adam_v" + input: "save/Identity_132" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_133" + op: 
"Identity" + input: "save/RestoreV2:132" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_132" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel" + input: "save/Identity_133" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_134" + op: "Identity" + input: "save/RestoreV2:133" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_133" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_m" + input: "save/Identity_134" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_135" + op: "Identity" + input: "save/RestoreV2:134" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_134" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/query/kernel/adam_v" + input: "save/Identity_135" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_136" + op: "Identity" + input: "save/RestoreV2:135" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_135" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias" + input: "save/Identity_136" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_137" + op: "Identity" + input: "save/RestoreV2:136" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_136" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_m" + input: "save/Identity_137" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_138" + op: "Identity" + input: "save/RestoreV2:137" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_137" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/bias/adam_v" + input: "save/Identity_138" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_139" + op: "Identity" + input: "save/RestoreV2:138" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_138" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel" + input: "save/Identity_139" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_140" + op: "Identity" + input: "save/RestoreV2:139" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_139" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_m" + input: "save/Identity_140" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_141" + op: "Identity" + input: "save/RestoreV2:140" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_140" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/attention/self/value/kernel/adam_v" + input: "save/Identity_141" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_142" + op: "Identity" + input: "save/RestoreV2:141" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_141" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias" + input: "save/Identity_142" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_143" + op: "Identity" + input: "save/RestoreV2:142" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_142" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_m" + input: "save/Identity_143" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_144" + op: "Identity" + input: "save/RestoreV2:143" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_143" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/bias/adam_v" + input: "save/Identity_144" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + 
attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_145" + op: "Identity" + input: "save/RestoreV2:144" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_144" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel" + input: "save/Identity_145" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_146" + op: "Identity" + input: "save/RestoreV2:145" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_145" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_m" + input: "save/Identity_146" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_147" + op: "Identity" + input: "save/RestoreV2:146" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_146" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/intermediate/dense/kernel/adam_v" + input: "save/Identity_147" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_148" + op: "Identity" + input: "save/RestoreV2:147" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_147" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias" + input: "save/Identity_148" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_149" + op: "Identity" + input: "save/RestoreV2:148" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_148" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_m" + input: "save/Identity_149" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_150" + op: "Identity" + input: "save/RestoreV2:149" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_149" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/bias/adam_v" + input: "save/Identity_150" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_151" + op: "Identity" + input: "save/RestoreV2:150" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_150" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel" + input: "save/Identity_151" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_152" + op: "Identity" + input: "save/RestoreV2:151" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_151" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_m" + input: "save/Identity_152" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_153" + op: "Identity" + input: "save/RestoreV2:152" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_152" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/dense/kernel/adam_v" + input: "save/Identity_153" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_154" + op: "Identity" + input: "save/RestoreV2:153" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_153" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta" + input: "save/Identity_154" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_155" + op: "Identity" + input: "save/RestoreV2:154" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_154" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/beta/adam_m" + input: "save/Identity_155" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_156" + op: "Identity" + input: "save/RestoreV2:155" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_155" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_10/output/layer_normalization_22/beta/adam_v" + input: "save/Identity_156" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_157" + op: "Identity" + input: "save/RestoreV2:156" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_156" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma" + input: "save/Identity_157" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_158" + op: "Identity" + input: "save/RestoreV2:157" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_157" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_m" + input: "save/Identity_158" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_159" + op: "Identity" + input: "save/RestoreV2:158" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_158" + op: "AssignVariableOp" + input: "bert/encoder/layer_10/output/layer_normalization_22/gamma/adam_v" + input: "save/Identity_159" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_160" + op: "Identity" + input: "save/RestoreV2:159" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_159" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias" + input: "save/Identity_160" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_161" + op: "Identity" + input: "save/RestoreV2:160" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_160" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_m" + input: "save/Identity_161" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_162" + op: "Identity" + input: "save/RestoreV2:161" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_161" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/bias/adam_v" + input: "save/Identity_162" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_163" + op: "Identity" + input: "save/RestoreV2:162" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_162" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel" + input: "save/Identity_163" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_164" + op: "Identity" + input: "save/RestoreV2:163" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_163" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_m" + input: "save/Identity_164" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_165" + op: "Identity" + input: "save/RestoreV2:164" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_164" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/dense/kernel/adam_v" + input: "save/Identity_165" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_166" + op: "Identity" + input: "save/RestoreV2:165" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_165" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta" + input: "save/Identity_166" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_167" + op: "Identity" + input: "save/RestoreV2:166" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_166" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_m" + input: "save/Identity_167" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_168" + op: "Identity" + input: 
"save/RestoreV2:167" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_167" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/beta/adam_v" + input: "save/Identity_168" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_169" + op: "Identity" + input: "save/RestoreV2:168" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_168" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma" + input: "save/Identity_169" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_170" + op: "Identity" + input: "save/RestoreV2:169" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_169" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_m" + input: "save/Identity_170" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_171" + op: "Identity" + input: "save/RestoreV2:170" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_170" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/output/layer_normalization_23/gamma/adam_v" + input: "save/Identity_171" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_172" + op: "Identity" + input: "save/RestoreV2:171" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_171" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias" + input: "save/Identity_172" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_173" + op: "Identity" + input: "save/RestoreV2:172" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_172" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_m" + input: "save/Identity_173" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_174" + op: "Identity" + input: "save/RestoreV2:173" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_173" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/bias/adam_v" + input: "save/Identity_174" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_175" + op: "Identity" + input: "save/RestoreV2:174" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_174" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel" + input: "save/Identity_175" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_176" + op: "Identity" + input: "save/RestoreV2:175" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_175" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_m" + input: "save/Identity_176" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_177" + op: "Identity" + input: "save/RestoreV2:176" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_176" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/key/kernel/adam_v" + input: "save/Identity_177" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_178" + op: "Identity" + input: "save/RestoreV2:177" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_177" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias" + input: "save/Identity_178" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_179" + op: "Identity" + input: "save/RestoreV2:178" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_178" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_m" + input: "save/Identity_179" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_180" + op: "Identity" + input: "save/RestoreV2:179" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_179" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/bias/adam_v" + input: "save/Identity_180" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_181" + op: "Identity" + input: "save/RestoreV2:180" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_180" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel" + input: "save/Identity_181" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_182" + op: "Identity" + input: "save/RestoreV2:181" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_181" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_m" + input: "save/Identity_182" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_183" + op: "Identity" + input: "save/RestoreV2:182" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_182" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/query/kernel/adam_v" + input: "save/Identity_183" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_184" + op: "Identity" + input: "save/RestoreV2:183" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_183" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias" + input: "save/Identity_184" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_185" + op: "Identity" + input: "save/RestoreV2:184" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_184" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_11/attention/self/value/bias/adam_m" + input: "save/Identity_185" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_186" + op: "Identity" + input: "save/RestoreV2:185" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_185" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/bias/adam_v" + input: "save/Identity_186" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_187" + op: "Identity" + input: "save/RestoreV2:186" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_186" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel" + input: "save/Identity_187" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_188" + op: "Identity" + input: "save/RestoreV2:187" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_187" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_m" + input: "save/Identity_188" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_189" + op: "Identity" + input: "save/RestoreV2:188" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_188" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/attention/self/value/kernel/adam_v" + input: "save/Identity_189" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_190" + op: "Identity" + input: "save/RestoreV2:189" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_189" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias" + input: "save/Identity_190" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_191" + op: "Identity" + input: "save/RestoreV2:190" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} 
+node { + name: "save/AssignVariableOp_190" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_m" + input: "save/Identity_191" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_192" + op: "Identity" + input: "save/RestoreV2:191" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_191" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/bias/adam_v" + input: "save/Identity_192" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_193" + op: "Identity" + input: "save/RestoreV2:192" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_192" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel" + input: "save/Identity_193" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_194" + op: "Identity" + input: "save/RestoreV2:193" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_193" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_m" + input: "save/Identity_194" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_195" + op: "Identity" + input: "save/RestoreV2:194" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_194" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/intermediate/dense/kernel/adam_v" + input: "save/Identity_195" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_196" + op: "Identity" + input: "save/RestoreV2:195" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_195" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/bias" + input: "save/Identity_196" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_197" + op: "Identity" + input: "save/RestoreV2:196" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value 
{ + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_196" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_m" + input: "save/Identity_197" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_198" + op: "Identity" + input: "save/RestoreV2:197" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_197" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/bias/adam_v" + input: "save/Identity_198" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_199" + op: "Identity" + input: "save/RestoreV2:198" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_198" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel" + input: "save/Identity_199" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_200" + op: "Identity" + input: "save/RestoreV2:199" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_199" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_m" + input: "save/Identity_200" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_201" + op: "Identity" + input: "save/RestoreV2:200" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_200" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/dense/kernel/adam_v" + input: "save/Identity_201" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_202" + op: "Identity" + input: "save/RestoreV2:201" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_201" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta" + input: "save/Identity_202" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_203" + op: "Identity" + input: "save/RestoreV2:202" + attr { + key: "T" + value { + type: DT_FLOAT 
+ } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_202" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_m" + input: "save/Identity_203" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_204" + op: "Identity" + input: "save/RestoreV2:203" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_203" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/beta/adam_v" + input: "save/Identity_204" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_205" + op: "Identity" + input: "save/RestoreV2:204" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_204" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma" + input: "save/Identity_205" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_206" + op: "Identity" + input: "save/RestoreV2:205" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_205" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_m" + input: "save/Identity_206" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_207" + op: "Identity" + input: "save/RestoreV2:206" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_206" + op: "AssignVariableOp" + input: "bert/encoder/layer_11/output/layer_normalization_24/gamma/adam_v" + input: "save/Identity_207" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_208" + op: "Identity" + input: "save/RestoreV2:207" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_207" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias" + input: "save/Identity_208" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { 
+ name: "save/Identity_209" + op: "Identity" + input: "save/RestoreV2:208" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_208" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_m" + input: "save/Identity_209" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_210" + op: "Identity" + input: "save/RestoreV2:209" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_209" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/bias/adam_v" + input: "save/Identity_210" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_211" + op: "Identity" + input: "save/RestoreV2:210" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_210" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel" + input: "save/Identity_211" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_212" + op: "Identity" + input: "save/RestoreV2:211" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_211" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_m" + input: "save/Identity_212" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_213" + op: "Identity" + input: "save/RestoreV2:212" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_212" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/dense/kernel/adam_v" + input: "save/Identity_213" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_214" + op: "Identity" + input: "save/RestoreV2:213" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_213" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta" + input: "save/Identity_214" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + 
value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_215" + op: "Identity" + input: "save/RestoreV2:214" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_214" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_m" + input: "save/Identity_215" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_216" + op: "Identity" + input: "save/RestoreV2:215" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_215" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/beta/adam_v" + input: "save/Identity_216" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_217" + op: "Identity" + input: "save/RestoreV2:216" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_216" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma" + input: "save/Identity_217" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_218" + op: "Identity" + input: "save/RestoreV2:217" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_217" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_m" + input: "save/Identity_218" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_219" + op: "Identity" + input: "save/RestoreV2:218" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_218" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/output/layer_normalization_5/gamma/adam_v" + input: "save/Identity_219" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_220" + op: "Identity" + input: "save/RestoreV2:219" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_219" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_2/attention/self/key/bias" + input: "save/Identity_220" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_221" + op: "Identity" + input: "save/RestoreV2:220" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_220" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_m" + input: "save/Identity_221" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_222" + op: "Identity" + input: "save/RestoreV2:221" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_221" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/bias/adam_v" + input: "save/Identity_222" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_223" + op: "Identity" + input: "save/RestoreV2:222" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_222" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel" + input: "save/Identity_223" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_224" + op: "Identity" + input: "save/RestoreV2:223" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_223" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_m" + input: "save/Identity_224" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_225" + op: "Identity" + input: "save/RestoreV2:224" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_224" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/key/kernel/adam_v" + input: "save/Identity_225" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_226" + op: "Identity" + input: "save/RestoreV2:225" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: 
"save/AssignVariableOp_225" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias" + input: "save/Identity_226" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_227" + op: "Identity" + input: "save/RestoreV2:226" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_226" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_m" + input: "save/Identity_227" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_228" + op: "Identity" + input: "save/RestoreV2:227" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_227" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/bias/adam_v" + input: "save/Identity_228" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_229" + op: "Identity" + input: "save/RestoreV2:228" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_228" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel" + input: "save/Identity_229" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_230" + op: "Identity" + input: "save/RestoreV2:229" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_229" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_m" + input: "save/Identity_230" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_231" + op: "Identity" + input: "save/RestoreV2:230" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_230" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/query/kernel/adam_v" + input: "save/Identity_231" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_232" + op: "Identity" + input: "save/RestoreV2:231" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + 
list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_231" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias" + input: "save/Identity_232" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_233" + op: "Identity" + input: "save/RestoreV2:232" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_232" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_m" + input: "save/Identity_233" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_234" + op: "Identity" + input: "save/RestoreV2:233" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_233" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/bias/adam_v" + input: "save/Identity_234" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_235" + op: "Identity" + input: "save/RestoreV2:234" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_234" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel" + input: "save/Identity_235" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_236" + op: "Identity" + input: "save/RestoreV2:235" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_235" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_m" + input: "save/Identity_236" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_237" + op: "Identity" + input: "save/RestoreV2:236" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_236" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/attention/self/value/kernel/adam_v" + input: "save/Identity_237" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_238" + op: "Identity" + input: "save/RestoreV2:237" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_237" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias" + input: "save/Identity_238" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_239" + op: "Identity" + input: "save/RestoreV2:238" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_238" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_m" + input: "save/Identity_239" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_240" + op: "Identity" + input: "save/RestoreV2:239" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_239" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/bias/adam_v" + input: "save/Identity_240" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_241" + op: "Identity" + input: "save/RestoreV2:240" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_240" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel" + input: "save/Identity_241" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_242" + op: "Identity" + input: "save/RestoreV2:241" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_241" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_m" + input: "save/Identity_242" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_243" + op: "Identity" + input: "save/RestoreV2:242" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_242" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/intermediate/dense/kernel/adam_v" + input: "save/Identity_243" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_244" + op: 
"Identity" + input: "save/RestoreV2:243" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_243" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias" + input: "save/Identity_244" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_245" + op: "Identity" + input: "save/RestoreV2:244" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_244" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_m" + input: "save/Identity_245" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_246" + op: "Identity" + input: "save/RestoreV2:245" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_245" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/bias/adam_v" + input: "save/Identity_246" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_247" + op: "Identity" + input: "save/RestoreV2:246" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_246" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel" + input: "save/Identity_247" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_248" + op: "Identity" + input: "save/RestoreV2:247" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_247" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_m" + input: "save/Identity_248" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_249" + op: "Identity" + input: "save/RestoreV2:248" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_248" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/dense/kernel/adam_v" + input: "save/Identity_249" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"save/Identity_250" + op: "Identity" + input: "save/RestoreV2:249" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_249" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta" + input: "save/Identity_250" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_251" + op: "Identity" + input: "save/RestoreV2:250" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_250" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_m" + input: "save/Identity_251" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_252" + op: "Identity" + input: "save/RestoreV2:251" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_251" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/beta/adam_v" + input: "save/Identity_252" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_253" + op: "Identity" + input: "save/RestoreV2:252" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_252" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma" + input: "save/Identity_253" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_254" + op: "Identity" + input: "save/RestoreV2:253" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_253" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_m" + input: "save/Identity_254" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_255" + op: "Identity" + input: "save/RestoreV2:254" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_254" + op: "AssignVariableOp" + input: "bert/encoder/layer_2/output/layer_normalization_6/gamma/adam_v" + input: "save/Identity_255" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: 
"dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_256" + op: "Identity" + input: "save/RestoreV2:255" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_255" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias" + input: "save/Identity_256" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_257" + op: "Identity" + input: "save/RestoreV2:256" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_256" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_m" + input: "save/Identity_257" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_258" + op: "Identity" + input: "save/RestoreV2:257" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_257" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/bias/adam_v" + input: "save/Identity_258" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_259" + op: "Identity" + input: "save/RestoreV2:258" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_258" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel" + input: "save/Identity_259" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_260" + op: "Identity" + input: "save/RestoreV2:259" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_259" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_m" + input: "save/Identity_260" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_261" + op: "Identity" + input: "save/RestoreV2:260" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_260" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/dense/kernel/adam_v" + input: "save/Identity_261" + attr { 
+ key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_262" + op: "Identity" + input: "save/RestoreV2:261" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_261" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta" + input: "save/Identity_262" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_263" + op: "Identity" + input: "save/RestoreV2:262" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_262" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_m" + input: "save/Identity_263" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_264" + op: "Identity" + input: "save/RestoreV2:263" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_263" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/beta/adam_v" + input: "save/Identity_264" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_265" + op: "Identity" + input: "save/RestoreV2:264" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_264" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma" + input: "save/Identity_265" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_266" + op: "Identity" + input: "save/RestoreV2:265" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_265" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_m" + input: "save/Identity_266" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_267" + op: "Identity" + input: "save/RestoreV2:266" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + 
name: "save/AssignVariableOp_266" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/output/layer_normalization_7/gamma/adam_v" + input: "save/Identity_267" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_268" + op: "Identity" + input: "save/RestoreV2:267" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_267" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias" + input: "save/Identity_268" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_269" + op: "Identity" + input: "save/RestoreV2:268" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_268" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_m" + input: "save/Identity_269" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_270" + op: "Identity" + input: "save/RestoreV2:269" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_269" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/bias/adam_v" + input: "save/Identity_270" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_271" + op: "Identity" + input: "save/RestoreV2:270" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_270" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel" + input: "save/Identity_271" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_272" + op: "Identity" + input: "save/RestoreV2:271" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_271" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_m" + input: "save/Identity_272" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_273" + op: "Identity" + input: "save/RestoreV2:272" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_272" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/key/kernel/adam_v" + input: "save/Identity_273" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_274" + op: "Identity" + input: "save/RestoreV2:273" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_273" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias" + input: "save/Identity_274" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_275" + op: "Identity" + input: "save/RestoreV2:274" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_274" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_m" + input: "save/Identity_275" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_276" + op: "Identity" + input: "save/RestoreV2:275" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_275" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/bias/adam_v" + input: "save/Identity_276" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_277" + op: "Identity" + input: "save/RestoreV2:276" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_276" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel" + input: "save/Identity_277" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_278" + op: "Identity" + input: "save/RestoreV2:277" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_277" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_m" + input: "save/Identity_278" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_279" + op: "Identity" + input: "save/RestoreV2:278" 
+ attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_278" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/query/kernel/adam_v" + input: "save/Identity_279" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_280" + op: "Identity" + input: "save/RestoreV2:279" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_279" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias" + input: "save/Identity_280" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_281" + op: "Identity" + input: "save/RestoreV2:280" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_280" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_m" + input: "save/Identity_281" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_282" + op: "Identity" + input: "save/RestoreV2:281" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_281" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/bias/adam_v" + input: "save/Identity_282" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_283" + op: "Identity" + input: "save/RestoreV2:282" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_282" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel" + input: "save/Identity_283" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_284" + op: "Identity" + input: "save/RestoreV2:283" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_283" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_m" + input: "save/Identity_284" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + 
name: "save/Identity_285" + op: "Identity" + input: "save/RestoreV2:284" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_284" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/attention/self/value/kernel/adam_v" + input: "save/Identity_285" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_286" + op: "Identity" + input: "save/RestoreV2:285" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_285" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias" + input: "save/Identity_286" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_287" + op: "Identity" + input: "save/RestoreV2:286" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_286" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_m" + input: "save/Identity_287" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_288" + op: "Identity" + input: "save/RestoreV2:287" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_287" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/bias/adam_v" + input: "save/Identity_288" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_289" + op: "Identity" + input: "save/RestoreV2:288" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_288" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel" + input: "save/Identity_289" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_290" + op: "Identity" + input: "save/RestoreV2:289" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_289" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_m" + input: "save/Identity_290" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + 
key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_291" + op: "Identity" + input: "save/RestoreV2:290" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_290" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/intermediate/dense/kernel/adam_v" + input: "save/Identity_291" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_292" + op: "Identity" + input: "save/RestoreV2:291" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_291" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias" + input: "save/Identity_292" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_293" + op: "Identity" + input: "save/RestoreV2:292" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_292" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_m" + input: "save/Identity_293" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_294" + op: "Identity" + input: "save/RestoreV2:293" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_293" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/bias/adam_v" + input: "save/Identity_294" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_295" + op: "Identity" + input: "save/RestoreV2:294" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_294" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel" + input: "save/Identity_295" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_296" + op: "Identity" + input: "save/RestoreV2:295" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_295" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_m" + input: "save/Identity_296" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_297" + op: "Identity" + input: "save/RestoreV2:296" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_296" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/dense/kernel/adam_v" + input: "save/Identity_297" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_298" + op: "Identity" + input: "save/RestoreV2:297" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_297" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta" + input: "save/Identity_298" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_299" + op: "Identity" + input: "save/RestoreV2:298" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_298" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_m" + input: "save/Identity_299" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_300" + op: "Identity" + input: "save/RestoreV2:299" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_299" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/beta/adam_v" + input: "save/Identity_300" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_301" + op: "Identity" + input: "save/RestoreV2:300" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_300" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma" + input: "save/Identity_301" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_302" + op: "Identity" + input: "save/RestoreV2:301" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_301" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_m" + input: "save/Identity_302" + attr { + 
key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_303" + op: "Identity" + input: "save/RestoreV2:302" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_302" + op: "AssignVariableOp" + input: "bert/encoder/layer_3/output/layer_normalization_8/gamma/adam_v" + input: "save/Identity_303" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_304" + op: "Identity" + input: "save/RestoreV2:303" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_303" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias" + input: "save/Identity_304" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_305" + op: "Identity" + input: "save/RestoreV2:304" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_304" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_m" + input: "save/Identity_305" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_306" + op: "Identity" + input: "save/RestoreV2:305" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_305" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/bias/adam_v" + input: "save/Identity_306" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_307" + op: "Identity" + input: "save/RestoreV2:306" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_306" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel" + input: "save/Identity_307" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_308" + op: "Identity" + input: "save/RestoreV2:307" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_307" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_4/attention/output/dense/kernel/adam_m" + input: "save/Identity_308" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_309" + op: "Identity" + input: "save/RestoreV2:308" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_308" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/dense/kernel/adam_v" + input: "save/Identity_309" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_310" + op: "Identity" + input: "save/RestoreV2:309" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_309" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta" + input: "save/Identity_310" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_311" + op: "Identity" + input: "save/RestoreV2:310" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_310" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_m" + input: "save/Identity_311" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_312" + op: "Identity" + input: "save/RestoreV2:311" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_311" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/beta/adam_v" + input: "save/Identity_312" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_313" + op: "Identity" + input: "save/RestoreV2:312" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_312" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma" + input: "save/Identity_313" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_314" + op: "Identity" + input: "save/RestoreV2:313" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_313" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_m" + input: "save/Identity_314" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_315" + op: "Identity" + input: "save/RestoreV2:314" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_314" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/output/layer_normalization_9/gamma/adam_v" + input: "save/Identity_315" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_316" + op: "Identity" + input: "save/RestoreV2:315" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_315" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias" + input: "save/Identity_316" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_317" + op: "Identity" + input: "save/RestoreV2:316" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_316" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_m" + input: "save/Identity_317" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_318" + op: "Identity" + input: "save/RestoreV2:317" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_317" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/bias/adam_v" + input: "save/Identity_318" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_319" + op: "Identity" + input: "save/RestoreV2:318" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_318" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel" + input: "save/Identity_319" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_320" + op: "Identity" + 
input: "save/RestoreV2:319" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_319" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_m" + input: "save/Identity_320" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_321" + op: "Identity" + input: "save/RestoreV2:320" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_320" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/key/kernel/adam_v" + input: "save/Identity_321" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_322" + op: "Identity" + input: "save/RestoreV2:321" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_321" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias" + input: "save/Identity_322" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_323" + op: "Identity" + input: "save/RestoreV2:322" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_322" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_m" + input: "save/Identity_323" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_324" + op: "Identity" + input: "save/RestoreV2:323" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_323" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/bias/adam_v" + input: "save/Identity_324" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_325" + op: "Identity" + input: "save/RestoreV2:324" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_324" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel" + input: "save/Identity_325" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: 
false + } + } +} +node { + name: "save/Identity_326" + op: "Identity" + input: "save/RestoreV2:325" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_325" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_m" + input: "save/Identity_326" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_327" + op: "Identity" + input: "save/RestoreV2:326" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_326" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/query/kernel/adam_v" + input: "save/Identity_327" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_328" + op: "Identity" + input: "save/RestoreV2:327" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_327" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias" + input: "save/Identity_328" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_329" + op: "Identity" + input: "save/RestoreV2:328" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_328" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_m" + input: "save/Identity_329" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_330" + op: "Identity" + input: "save/RestoreV2:329" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_329" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/bias/adam_v" + input: "save/Identity_330" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_331" + op: "Identity" + input: "save/RestoreV2:330" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_330" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel" + input: "save/Identity_331" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { 
+ type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_332" + op: "Identity" + input: "save/RestoreV2:331" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_331" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_m" + input: "save/Identity_332" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_333" + op: "Identity" + input: "save/RestoreV2:332" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_332" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/attention/self/value/kernel/adam_v" + input: "save/Identity_333" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_334" + op: "Identity" + input: "save/RestoreV2:333" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_333" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias" + input: "save/Identity_334" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_335" + op: "Identity" + input: "save/RestoreV2:334" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_334" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_m" + input: "save/Identity_335" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_336" + op: "Identity" + input: "save/RestoreV2:335" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_335" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/bias/adam_v" + input: "save/Identity_336" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_337" + op: "Identity" + input: "save/RestoreV2:336" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_336" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel" + input: "save/Identity_337" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_338" + op: "Identity" + input: "save/RestoreV2:337" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_337" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_m" + input: "save/Identity_338" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_339" + op: "Identity" + input: "save/RestoreV2:338" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_338" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/intermediate/dense/kernel/adam_v" + input: "save/Identity_339" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_340" + op: "Identity" + input: "save/RestoreV2:339" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_339" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias" + input: "save/Identity_340" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_341" + op: "Identity" + input: "save/RestoreV2:340" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_340" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_m" + input: "save/Identity_341" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_342" + op: "Identity" + input: "save/RestoreV2:341" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_341" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/bias/adam_v" + input: "save/Identity_342" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_343" + op: "Identity" + input: "save/RestoreV2:342" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_342" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel" + 
input: "save/Identity_343" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_344" + op: "Identity" + input: "save/RestoreV2:343" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_343" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_m" + input: "save/Identity_344" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_345" + op: "Identity" + input: "save/RestoreV2:344" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_344" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/dense/kernel/adam_v" + input: "save/Identity_345" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_346" + op: "Identity" + input: "save/RestoreV2:345" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_345" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta" + input: "save/Identity_346" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_347" + op: "Identity" + input: "save/RestoreV2:346" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_346" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_m" + input: "save/Identity_347" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_348" + op: "Identity" + input: "save/RestoreV2:347" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_347" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/beta/adam_v" + input: "save/Identity_348" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_349" + op: "Identity" + input: "save/RestoreV2:348" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_348" + 
op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma" + input: "save/Identity_349" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_350" + op: "Identity" + input: "save/RestoreV2:349" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_349" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_m" + input: "save/Identity_350" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_351" + op: "Identity" + input: "save/RestoreV2:350" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_350" + op: "AssignVariableOp" + input: "bert/encoder/layer_4/output/layer_normalization_10/gamma/adam_v" + input: "save/Identity_351" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_352" + op: "Identity" + input: "save/RestoreV2:351" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_351" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias" + input: "save/Identity_352" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_353" + op: "Identity" + input: "save/RestoreV2:352" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_352" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_m" + input: "save/Identity_353" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_354" + op: "Identity" + input: "save/RestoreV2:353" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_353" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/bias/adam_v" + input: "save/Identity_354" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_355" + op: "Identity" + input: "save/RestoreV2:354" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + 
list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_354" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel" + input: "save/Identity_355" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_356" + op: "Identity" + input: "save/RestoreV2:355" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_355" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_m" + input: "save/Identity_356" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_357" + op: "Identity" + input: "save/RestoreV2:356" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_356" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/dense/kernel/adam_v" + input: "save/Identity_357" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_358" + op: "Identity" + input: "save/RestoreV2:357" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_357" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta" + input: "save/Identity_358" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_359" + op: "Identity" + input: "save/RestoreV2:358" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_358" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_m" + input: "save/Identity_359" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_360" + op: "Identity" + input: "save/RestoreV2:359" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_359" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/beta/adam_v" + input: "save/Identity_360" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_361" + op: 
"Identity" + input: "save/RestoreV2:360" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_360" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma" + input: "save/Identity_361" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_362" + op: "Identity" + input: "save/RestoreV2:361" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_361" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_m" + input: "save/Identity_362" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_363" + op: "Identity" + input: "save/RestoreV2:362" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_362" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/output/layer_normalization_11/gamma/adam_v" + input: "save/Identity_363" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_364" + op: "Identity" + input: "save/RestoreV2:363" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_363" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias" + input: "save/Identity_364" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_365" + op: "Identity" + input: "save/RestoreV2:364" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_364" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_m" + input: "save/Identity_365" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_366" + op: "Identity" + input: "save/RestoreV2:365" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_365" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/bias/adam_v" + input: "save/Identity_366" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_367" + op: "Identity" + input: "save/RestoreV2:366" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_366" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel" + input: "save/Identity_367" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_368" + op: "Identity" + input: "save/RestoreV2:367" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_367" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_m" + input: "save/Identity_368" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_369" + op: "Identity" + input: "save/RestoreV2:368" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_368" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/key/kernel/adam_v" + input: "save/Identity_369" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_370" + op: "Identity" + input: "save/RestoreV2:369" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_369" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias" + input: "save/Identity_370" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_371" + op: "Identity" + input: "save/RestoreV2:370" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_370" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_m" + input: "save/Identity_371" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_372" + op: "Identity" + input: "save/RestoreV2:371" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_371" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/bias/adam_v" + input: "save/Identity_372" + attr { + key: "_has_manual_control_dependencies" + 
value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_373" + op: "Identity" + input: "save/RestoreV2:372" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_372" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel" + input: "save/Identity_373" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_374" + op: "Identity" + input: "save/RestoreV2:373" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_373" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_m" + input: "save/Identity_374" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_375" + op: "Identity" + input: "save/RestoreV2:374" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_374" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/query/kernel/adam_v" + input: "save/Identity_375" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_376" + op: "Identity" + input: "save/RestoreV2:375" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_375" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias" + input: "save/Identity_376" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_377" + op: "Identity" + input: "save/RestoreV2:376" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_376" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_m" + input: "save/Identity_377" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_378" + op: "Identity" + input: "save/RestoreV2:377" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_377" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/bias/adam_v" + input: 
"save/Identity_378" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_379" + op: "Identity" + input: "save/RestoreV2:378" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_378" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel" + input: "save/Identity_379" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_380" + op: "Identity" + input: "save/RestoreV2:379" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_379" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_m" + input: "save/Identity_380" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_381" + op: "Identity" + input: "save/RestoreV2:380" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_380" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/attention/self/value/kernel/adam_v" + input: "save/Identity_381" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_382" + op: "Identity" + input: "save/RestoreV2:381" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_381" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias" + input: "save/Identity_382" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_383" + op: "Identity" + input: "save/RestoreV2:382" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_382" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/bias/adam_m" + input: "save/Identity_383" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_384" + op: "Identity" + input: "save/RestoreV2:383" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_383" + op: "AssignVariableOp" + 
input: "bert/encoder/layer_5/intermediate/dense/bias/adam_v" + input: "save/Identity_384" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_385" + op: "Identity" + input: "save/RestoreV2:384" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_384" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel" + input: "save/Identity_385" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_386" + op: "Identity" + input: "save/RestoreV2:385" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_385" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_m" + input: "save/Identity_386" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_387" + op: "Identity" + input: "save/RestoreV2:386" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_386" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/intermediate/dense/kernel/adam_v" + input: "save/Identity_387" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_388" + op: "Identity" + input: "save/RestoreV2:387" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_387" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias" + input: "save/Identity_388" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_389" + op: "Identity" + input: "save/RestoreV2:388" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_388" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_m" + input: "save/Identity_389" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_390" + op: "Identity" + input: "save/RestoreV2:389" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: 
"save/AssignVariableOp_389" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/bias/adam_v" + input: "save/Identity_390" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_391" + op: "Identity" + input: "save/RestoreV2:390" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_390" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel" + input: "save/Identity_391" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_392" + op: "Identity" + input: "save/RestoreV2:391" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_391" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_m" + input: "save/Identity_392" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_393" + op: "Identity" + input: "save/RestoreV2:392" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_392" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/dense/kernel/adam_v" + input: "save/Identity_393" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_394" + op: "Identity" + input: "save/RestoreV2:393" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_393" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta" + input: "save/Identity_394" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_395" + op: "Identity" + input: "save/RestoreV2:394" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_394" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_m" + input: "save/Identity_395" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_396" + op: "Identity" + input: "save/RestoreV2:395" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + 
shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_395" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/beta/adam_v" + input: "save/Identity_396" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_397" + op: "Identity" + input: "save/RestoreV2:396" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_396" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma" + input: "save/Identity_397" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_398" + op: "Identity" + input: "save/RestoreV2:397" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_397" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_m" + input: "save/Identity_398" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_399" + op: "Identity" + input: "save/RestoreV2:398" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_398" + op: "AssignVariableOp" + input: "bert/encoder/layer_5/output/layer_normalization_12/gamma/adam_v" + input: "save/Identity_399" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_400" + op: "Identity" + input: "save/RestoreV2:399" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_399" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias" + input: "save/Identity_400" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_401" + op: "Identity" + input: "save/RestoreV2:400" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_400" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_m" + input: "save/Identity_401" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_402" + op: "Identity" + input: 
"save/RestoreV2:401" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_401" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/bias/adam_v" + input: "save/Identity_402" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_403" + op: "Identity" + input: "save/RestoreV2:402" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_402" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel" + input: "save/Identity_403" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_404" + op: "Identity" + input: "save/RestoreV2:403" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_403" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_m" + input: "save/Identity_404" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_405" + op: "Identity" + input: "save/RestoreV2:404" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_404" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/dense/kernel/adam_v" + input: "save/Identity_405" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_406" + op: "Identity" + input: "save/RestoreV2:405" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_405" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta" + input: "save/Identity_406" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_407" + op: "Identity" + input: "save/RestoreV2:406" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_406" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_m" + input: "save/Identity_407" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr 
{ + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_408" + op: "Identity" + input: "save/RestoreV2:407" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_407" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/beta/adam_v" + input: "save/Identity_408" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_409" + op: "Identity" + input: "save/RestoreV2:408" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_408" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma" + input: "save/Identity_409" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_410" + op: "Identity" + input: "save/RestoreV2:409" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_409" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_m" + input: "save/Identity_410" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_411" + op: "Identity" + input: "save/RestoreV2:410" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_410" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/output/layer_normalization_13/gamma/adam_v" + input: "save/Identity_411" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_412" + op: "Identity" + input: "save/RestoreV2:411" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_411" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias" + input: "save/Identity_412" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_413" + op: "Identity" + input: "save/RestoreV2:412" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_412" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_m" + input: "save/Identity_413" + 
attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_414" + op: "Identity" + input: "save/RestoreV2:413" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_413" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/bias/adam_v" + input: "save/Identity_414" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_415" + op: "Identity" + input: "save/RestoreV2:414" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_414" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel" + input: "save/Identity_415" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_416" + op: "Identity" + input: "save/RestoreV2:415" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_415" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_m" + input: "save/Identity_416" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_417" + op: "Identity" + input: "save/RestoreV2:416" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_416" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/key/kernel/adam_v" + input: "save/Identity_417" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_418" + op: "Identity" + input: "save/RestoreV2:417" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_417" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias" + input: "save/Identity_418" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_419" + op: "Identity" + input: "save/RestoreV2:418" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_418" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_6/attention/self/query/bias/adam_m" + input: "save/Identity_419" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_420" + op: "Identity" + input: "save/RestoreV2:419" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_419" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/bias/adam_v" + input: "save/Identity_420" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_421" + op: "Identity" + input: "save/RestoreV2:420" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_420" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel" + input: "save/Identity_421" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_422" + op: "Identity" + input: "save/RestoreV2:421" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_421" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_m" + input: "save/Identity_422" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_423" + op: "Identity" + input: "save/RestoreV2:422" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_422" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/query/kernel/adam_v" + input: "save/Identity_423" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_424" + op: "Identity" + input: "save/RestoreV2:423" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_423" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias" + input: "save/Identity_424" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_425" + op: "Identity" + input: "save/RestoreV2:424" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} 
+node { + name: "save/AssignVariableOp_424" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_m" + input: "save/Identity_425" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_426" + op: "Identity" + input: "save/RestoreV2:425" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_425" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/bias/adam_v" + input: "save/Identity_426" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_427" + op: "Identity" + input: "save/RestoreV2:426" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_426" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel" + input: "save/Identity_427" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_428" + op: "Identity" + input: "save/RestoreV2:427" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_427" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_m" + input: "save/Identity_428" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_429" + op: "Identity" + input: "save/RestoreV2:428" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_428" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/attention/self/value/kernel/adam_v" + input: "save/Identity_429" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_430" + op: "Identity" + input: "save/RestoreV2:429" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_429" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias" + input: "save/Identity_430" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_431" + op: "Identity" + input: "save/RestoreV2:430" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: 
"_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_430" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_m" + input: "save/Identity_431" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_432" + op: "Identity" + input: "save/RestoreV2:431" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_431" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/bias/adam_v" + input: "save/Identity_432" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_433" + op: "Identity" + input: "save/RestoreV2:432" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_432" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel" + input: "save/Identity_433" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_434" + op: "Identity" + input: "save/RestoreV2:433" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_433" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_m" + input: "save/Identity_434" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_435" + op: "Identity" + input: "save/RestoreV2:434" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_434" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/intermediate/dense/kernel/adam_v" + input: "save/Identity_435" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_436" + op: "Identity" + input: "save/RestoreV2:435" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_435" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias" + input: "save/Identity_436" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_437" + op: "Identity" + input: "save/RestoreV2:436" + attr { + key: 
"T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_436" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_m" + input: "save/Identity_437" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_438" + op: "Identity" + input: "save/RestoreV2:437" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_437" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/bias/adam_v" + input: "save/Identity_438" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_439" + op: "Identity" + input: "save/RestoreV2:438" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_438" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel" + input: "save/Identity_439" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_440" + op: "Identity" + input: "save/RestoreV2:439" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_439" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_m" + input: "save/Identity_440" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_441" + op: "Identity" + input: "save/RestoreV2:440" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_440" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/dense/kernel/adam_v" + input: "save/Identity_441" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_442" + op: "Identity" + input: "save/RestoreV2:441" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_441" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta" + input: "save/Identity_442" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_443" + op: "Identity" + 
input: "save/RestoreV2:442" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_442" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_m" + input: "save/Identity_443" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_444" + op: "Identity" + input: "save/RestoreV2:443" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_443" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/beta/adam_v" + input: "save/Identity_444" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_445" + op: "Identity" + input: "save/RestoreV2:444" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_444" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma" + input: "save/Identity_445" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_446" + op: "Identity" + input: "save/RestoreV2:445" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_445" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_m" + input: "save/Identity_446" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_447" + op: "Identity" + input: "save/RestoreV2:446" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_446" + op: "AssignVariableOp" + input: "bert/encoder/layer_6/output/layer_normalization_14/gamma/adam_v" + input: "save/Identity_447" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_448" + op: "Identity" + input: "save/RestoreV2:447" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_447" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias" + input: "save/Identity_448" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + 
attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_449" + op: "Identity" + input: "save/RestoreV2:448" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_448" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_m" + input: "save/Identity_449" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_450" + op: "Identity" + input: "save/RestoreV2:449" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_449" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/bias/adam_v" + input: "save/Identity_450" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_451" + op: "Identity" + input: "save/RestoreV2:450" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_450" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel" + input: "save/Identity_451" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_452" + op: "Identity" + input: "save/RestoreV2:451" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_451" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_m" + input: "save/Identity_452" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_453" + op: "Identity" + input: "save/RestoreV2:452" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_452" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/dense/kernel/adam_v" + input: "save/Identity_453" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_454" + op: "Identity" + input: "save/RestoreV2:453" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_453" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta" + input: "save/Identity_454" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_455" + op: "Identity" + input: "save/RestoreV2:454" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_454" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_m" + input: "save/Identity_455" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_456" + op: "Identity" + input: "save/RestoreV2:455" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_455" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/beta/adam_v" + input: "save/Identity_456" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_457" + op: "Identity" + input: "save/RestoreV2:456" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_456" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma" + input: "save/Identity_457" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_458" + op: "Identity" + input: "save/RestoreV2:457" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_457" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_m" + input: "save/Identity_458" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_459" + op: "Identity" + input: "save/RestoreV2:458" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_458" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/output/layer_normalization_15/gamma/adam_v" + input: "save/Identity_459" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_460" + op: "Identity" + input: "save/RestoreV2:459" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node 
{ + name: "save/AssignVariableOp_459" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias" + input: "save/Identity_460" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_461" + op: "Identity" + input: "save/RestoreV2:460" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_460" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_m" + input: "save/Identity_461" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_462" + op: "Identity" + input: "save/RestoreV2:461" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_461" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/bias/adam_v" + input: "save/Identity_462" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_463" + op: "Identity" + input: "save/RestoreV2:462" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_462" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel" + input: "save/Identity_463" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_464" + op: "Identity" + input: "save/RestoreV2:463" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_463" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_m" + input: "save/Identity_464" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_465" + op: "Identity" + input: "save/RestoreV2:464" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_464" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/key/kernel/adam_v" + input: "save/Identity_465" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_466" + op: "Identity" + input: "save/RestoreV2:465" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + 
list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_465" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias" + input: "save/Identity_466" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_467" + op: "Identity" + input: "save/RestoreV2:466" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_466" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_m" + input: "save/Identity_467" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_468" + op: "Identity" + input: "save/RestoreV2:467" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_467" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/bias/adam_v" + input: "save/Identity_468" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_469" + op: "Identity" + input: "save/RestoreV2:468" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_468" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel" + input: "save/Identity_469" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_470" + op: "Identity" + input: "save/RestoreV2:469" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_469" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_m" + input: "save/Identity_470" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_471" + op: "Identity" + input: "save/RestoreV2:470" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_470" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/query/kernel/adam_v" + input: "save/Identity_471" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_472" + op: "Identity" + input: "save/RestoreV2:471" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_471" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias" + input: "save/Identity_472" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_473" + op: "Identity" + input: "save/RestoreV2:472" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_472" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_m" + input: "save/Identity_473" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_474" + op: "Identity" + input: "save/RestoreV2:473" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_473" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/bias/adam_v" + input: "save/Identity_474" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_475" + op: "Identity" + input: "save/RestoreV2:474" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_474" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel" + input: "save/Identity_475" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_476" + op: "Identity" + input: "save/RestoreV2:475" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_475" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_m" + input: "save/Identity_476" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_477" + op: "Identity" + input: "save/RestoreV2:476" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_476" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/attention/self/value/kernel/adam_v" + input: "save/Identity_477" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"save/Identity_478" + op: "Identity" + input: "save/RestoreV2:477" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_477" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias" + input: "save/Identity_478" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_479" + op: "Identity" + input: "save/RestoreV2:478" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_478" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_m" + input: "save/Identity_479" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_480" + op: "Identity" + input: "save/RestoreV2:479" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_479" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/bias/adam_v" + input: "save/Identity_480" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_481" + op: "Identity" + input: "save/RestoreV2:480" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_480" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel" + input: "save/Identity_481" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_482" + op: "Identity" + input: "save/RestoreV2:481" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_481" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_m" + input: "save/Identity_482" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_483" + op: "Identity" + input: "save/RestoreV2:482" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_482" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/intermediate/dense/kernel/adam_v" + input: "save/Identity_483" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_484" + op: "Identity" + input: "save/RestoreV2:483" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_483" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias" + input: "save/Identity_484" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_485" + op: "Identity" + input: "save/RestoreV2:484" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_484" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_m" + input: "save/Identity_485" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_486" + op: "Identity" + input: "save/RestoreV2:485" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_485" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/bias/adam_v" + input: "save/Identity_486" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_487" + op: "Identity" + input: "save/RestoreV2:486" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_486" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel" + input: "save/Identity_487" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_488" + op: "Identity" + input: "save/RestoreV2:487" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_487" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_m" + input: "save/Identity_488" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_489" + op: "Identity" + input: "save/RestoreV2:488" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_488" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/dense/kernel/adam_v" + input: "save/Identity_489" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: 
DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_490" + op: "Identity" + input: "save/RestoreV2:489" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_489" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta" + input: "save/Identity_490" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_491" + op: "Identity" + input: "save/RestoreV2:490" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_490" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_m" + input: "save/Identity_491" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_492" + op: "Identity" + input: "save/RestoreV2:491" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_491" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/beta/adam_v" + input: "save/Identity_492" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_493" + op: "Identity" + input: "save/RestoreV2:492" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_492" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma" + input: "save/Identity_493" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_494" + op: "Identity" + input: "save/RestoreV2:493" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_493" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_m" + input: "save/Identity_494" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_495" + op: "Identity" + input: "save/RestoreV2:494" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_494" + op: "AssignVariableOp" + input: "bert/encoder/layer_7/output/layer_normalization_16/gamma/adam_v" + input: 
"save/Identity_495" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_496" + op: "Identity" + input: "save/RestoreV2:495" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_495" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias" + input: "save/Identity_496" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_497" + op: "Identity" + input: "save/RestoreV2:496" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_496" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_m" + input: "save/Identity_497" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_498" + op: "Identity" + input: "save/RestoreV2:497" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_497" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/bias/adam_v" + input: "save/Identity_498" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_499" + op: "Identity" + input: "save/RestoreV2:498" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_498" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel" + input: "save/Identity_499" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_500" + op: "Identity" + input: "save/RestoreV2:499" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_499" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_m" + input: "save/Identity_500" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_501" + op: "Identity" + input: "save/RestoreV2:500" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_500" + op: 
"AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/dense/kernel/adam_v" + input: "save/Identity_501" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_502" + op: "Identity" + input: "save/RestoreV2:501" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_501" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta" + input: "save/Identity_502" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_503" + op: "Identity" + input: "save/RestoreV2:502" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_502" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_m" + input: "save/Identity_503" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_504" + op: "Identity" + input: "save/RestoreV2:503" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_503" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/beta/adam_v" + input: "save/Identity_504" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_505" + op: "Identity" + input: "save/RestoreV2:504" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_504" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma" + input: "save/Identity_505" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_506" + op: "Identity" + input: "save/RestoreV2:505" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_505" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_m" + input: "save/Identity_506" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_507" + op: "Identity" + input: "save/RestoreV2:506" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_506" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/output/layer_normalization_17/gamma/adam_v" + input: "save/Identity_507" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_508" + op: "Identity" + input: "save/RestoreV2:507" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_507" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias" + input: "save/Identity_508" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_509" + op: "Identity" + input: "save/RestoreV2:508" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_508" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_m" + input: "save/Identity_509" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_510" + op: "Identity" + input: "save/RestoreV2:509" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_509" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/bias/adam_v" + input: "save/Identity_510" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_511" + op: "Identity" + input: "save/RestoreV2:510" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_510" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel" + input: "save/Identity_511" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_512" + op: "Identity" + input: "save/RestoreV2:511" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_511" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_m" + input: "save/Identity_512" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_513" + 
op: "Identity" + input: "save/RestoreV2:512" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_512" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/key/kernel/adam_v" + input: "save/Identity_513" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_514" + op: "Identity" + input: "save/RestoreV2:513" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_513" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias" + input: "save/Identity_514" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_515" + op: "Identity" + input: "save/RestoreV2:514" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_514" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_m" + input: "save/Identity_515" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_516" + op: "Identity" + input: "save/RestoreV2:515" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_515" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/bias/adam_v" + input: "save/Identity_516" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_517" + op: "Identity" + input: "save/RestoreV2:516" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_516" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel" + input: "save/Identity_517" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_518" + op: "Identity" + input: "save/RestoreV2:517" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_517" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_m" + input: "save/Identity_518" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: 
"validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_519" + op: "Identity" + input: "save/RestoreV2:518" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_518" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/query/kernel/adam_v" + input: "save/Identity_519" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_520" + op: "Identity" + input: "save/RestoreV2:519" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_519" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias" + input: "save/Identity_520" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_521" + op: "Identity" + input: "save/RestoreV2:520" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_520" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_m" + input: "save/Identity_521" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_522" + op: "Identity" + input: "save/RestoreV2:521" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_521" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/bias/adam_v" + input: "save/Identity_522" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_523" + op: "Identity" + input: "save/RestoreV2:522" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_522" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel" + input: "save/Identity_523" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_524" + op: "Identity" + input: "save/RestoreV2:523" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_523" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_m" + input: "save/Identity_524" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + 
attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_525" + op: "Identity" + input: "save/RestoreV2:524" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_524" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/attention/self/value/kernel/adam_v" + input: "save/Identity_525" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_526" + op: "Identity" + input: "save/RestoreV2:525" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_525" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias" + input: "save/Identity_526" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_527" + op: "Identity" + input: "save/RestoreV2:526" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_526" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_m" + input: "save/Identity_527" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_528" + op: "Identity" + input: "save/RestoreV2:527" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_527" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/bias/adam_v" + input: "save/Identity_528" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_529" + op: "Identity" + input: "save/RestoreV2:528" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_528" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel" + input: "save/Identity_529" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_530" + op: "Identity" + input: "save/RestoreV2:529" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_529" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_m" + input: "save/Identity_530" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_531" + op: "Identity" + input: "save/RestoreV2:530" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_530" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/intermediate/dense/kernel/adam_v" + input: "save/Identity_531" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_532" + op: "Identity" + input: "save/RestoreV2:531" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_531" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias" + input: "save/Identity_532" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_533" + op: "Identity" + input: "save/RestoreV2:532" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_532" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_m" + input: "save/Identity_533" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_534" + op: "Identity" + input: "save/RestoreV2:533" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_533" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/bias/adam_v" + input: "save/Identity_534" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_535" + op: "Identity" + input: "save/RestoreV2:534" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_534" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel" + input: "save/Identity_535" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_536" + op: "Identity" + input: "save/RestoreV2:535" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_535" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_m" + input: 
"save/Identity_536" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_537" + op: "Identity" + input: "save/RestoreV2:536" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_536" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/dense/kernel/adam_v" + input: "save/Identity_537" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_538" + op: "Identity" + input: "save/RestoreV2:537" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_537" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta" + input: "save/Identity_538" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_539" + op: "Identity" + input: "save/RestoreV2:538" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_538" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_m" + input: "save/Identity_539" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_540" + op: "Identity" + input: "save/RestoreV2:539" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_539" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/beta/adam_v" + input: "save/Identity_540" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_541" + op: "Identity" + input: "save/RestoreV2:540" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_540" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma" + input: "save/Identity_541" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_542" + op: "Identity" + input: "save/RestoreV2:541" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_541" 
+ op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_m" + input: "save/Identity_542" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_543" + op: "Identity" + input: "save/RestoreV2:542" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_542" + op: "AssignVariableOp" + input: "bert/encoder/layer_8/output/layer_normalization_18/gamma/adam_v" + input: "save/Identity_543" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_544" + op: "Identity" + input: "save/RestoreV2:543" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_543" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias" + input: "save/Identity_544" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_545" + op: "Identity" + input: "save/RestoreV2:544" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_544" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_m" + input: "save/Identity_545" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_546" + op: "Identity" + input: "save/RestoreV2:545" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_545" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/bias/adam_v" + input: "save/Identity_546" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_547" + op: "Identity" + input: "save/RestoreV2:546" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_546" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel" + input: "save/Identity_547" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_548" + op: "Identity" + input: "save/RestoreV2:547" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + 
list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_547" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_m" + input: "save/Identity_548" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_549" + op: "Identity" + input: "save/RestoreV2:548" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_548" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/dense/kernel/adam_v" + input: "save/Identity_549" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_550" + op: "Identity" + input: "save/RestoreV2:549" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_549" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta" + input: "save/Identity_550" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_551" + op: "Identity" + input: "save/RestoreV2:550" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_550" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_m" + input: "save/Identity_551" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_552" + op: "Identity" + input: "save/RestoreV2:551" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_551" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/beta/adam_v" + input: "save/Identity_552" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_553" + op: "Identity" + input: "save/RestoreV2:552" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_552" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma" + input: "save/Identity_553" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: 
"save/Identity_554" + op: "Identity" + input: "save/RestoreV2:553" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_553" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_m" + input: "save/Identity_554" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_555" + op: "Identity" + input: "save/RestoreV2:554" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_554" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/output/layer_normalization_19/gamma/adam_v" + input: "save/Identity_555" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_556" + op: "Identity" + input: "save/RestoreV2:555" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_555" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias" + input: "save/Identity_556" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_557" + op: "Identity" + input: "save/RestoreV2:556" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_556" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_m" + input: "save/Identity_557" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_558" + op: "Identity" + input: "save/RestoreV2:557" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_557" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/bias/adam_v" + input: "save/Identity_558" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_559" + op: "Identity" + input: "save/RestoreV2:558" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_558" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel" + input: "save/Identity_559" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + 
type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_560" + op: "Identity" + input: "save/RestoreV2:559" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_559" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_m" + input: "save/Identity_560" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_561" + op: "Identity" + input: "save/RestoreV2:560" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_560" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/key/kernel/adam_v" + input: "save/Identity_561" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_562" + op: "Identity" + input: "save/RestoreV2:561" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_561" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias" + input: "save/Identity_562" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_563" + op: "Identity" + input: "save/RestoreV2:562" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_562" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_m" + input: "save/Identity_563" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_564" + op: "Identity" + input: "save/RestoreV2:563" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_563" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/bias/adam_v" + input: "save/Identity_564" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_565" + op: "Identity" + input: "save/RestoreV2:564" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_564" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel" + input: "save/Identity_565" + attr { + key: 
"_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_566" + op: "Identity" + input: "save/RestoreV2:565" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_565" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_m" + input: "save/Identity_566" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_567" + op: "Identity" + input: "save/RestoreV2:566" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_566" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/query/kernel/adam_v" + input: "save/Identity_567" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_568" + op: "Identity" + input: "save/RestoreV2:567" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_567" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias" + input: "save/Identity_568" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_569" + op: "Identity" + input: "save/RestoreV2:568" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_568" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_m" + input: "save/Identity_569" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_570" + op: "Identity" + input: "save/RestoreV2:569" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_569" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/bias/adam_v" + input: "save/Identity_570" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_571" + op: "Identity" + input: "save/RestoreV2:570" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_570" + op: "AssignVariableOp" + input: 
"bert/encoder/layer_9/attention/self/value/kernel" + input: "save/Identity_571" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_572" + op: "Identity" + input: "save/RestoreV2:571" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_571" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_m" + input: "save/Identity_572" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_573" + op: "Identity" + input: "save/RestoreV2:572" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_572" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/attention/self/value/kernel/adam_v" + input: "save/Identity_573" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_574" + op: "Identity" + input: "save/RestoreV2:573" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_573" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias" + input: "save/Identity_574" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_575" + op: "Identity" + input: "save/RestoreV2:574" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_574" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_m" + input: "save/Identity_575" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_576" + op: "Identity" + input: "save/RestoreV2:575" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_575" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/bias/adam_v" + input: "save/Identity_576" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_577" + op: "Identity" + input: "save/RestoreV2:576" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + 
name: "save/AssignVariableOp_576" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel" + input: "save/Identity_577" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_578" + op: "Identity" + input: "save/RestoreV2:577" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_577" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_m" + input: "save/Identity_578" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_579" + op: "Identity" + input: "save/RestoreV2:578" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_578" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/intermediate/dense/kernel/adam_v" + input: "save/Identity_579" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_580" + op: "Identity" + input: "save/RestoreV2:579" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_579" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias" + input: "save/Identity_580" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_581" + op: "Identity" + input: "save/RestoreV2:580" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_580" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_m" + input: "save/Identity_581" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_582" + op: "Identity" + input: "save/RestoreV2:581" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_581" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/bias/adam_v" + input: "save/Identity_582" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_583" + op: "Identity" + input: "save/RestoreV2:582" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + 
unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_582" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel" + input: "save/Identity_583" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_584" + op: "Identity" + input: "save/RestoreV2:583" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_583" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_m" + input: "save/Identity_584" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_585" + op: "Identity" + input: "save/RestoreV2:584" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_584" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/dense/kernel/adam_v" + input: "save/Identity_585" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_586" + op: "Identity" + input: "save/RestoreV2:585" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_585" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta" + input: "save/Identity_586" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_587" + op: "Identity" + input: "save/RestoreV2:586" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_586" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_m" + input: "save/Identity_587" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_588" + op: "Identity" + input: "save/RestoreV2:587" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_587" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/beta/adam_v" + input: "save/Identity_588" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_589" + op: "Identity" + input: "save/RestoreV2:588" + attr { + key: "T" + value { + type: 
DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_588" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma" + input: "save/Identity_589" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_590" + op: "Identity" + input: "save/RestoreV2:589" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_589" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_m" + input: "save/Identity_590" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_591" + op: "Identity" + input: "save/RestoreV2:590" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_590" + op: "AssignVariableOp" + input: "bert/encoder/layer_9/output/layer_normalization_20/gamma/adam_v" + input: "save/Identity_591" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_592" + op: "Identity" + input: "save/RestoreV2:591" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_591" + op: "AssignVariableOp" + input: "bert/pooler/dense/bias" + input: "save/Identity_592" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_593" + op: "Identity" + input: "save/RestoreV2:592" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_592" + op: "AssignVariableOp" + input: "bert/pooler/dense/kernel" + input: "save/Identity_593" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_594" + op: "Identity" + input: "save/RestoreV2:593" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_593" + op: "AssignVariableOp" + input: "cls/squad/output_bias" + input: "save/Identity_594" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_595" + op: "Identity" + input: "save/RestoreV2:594" + attr { + key: "T" + 
value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_594" + op: "AssignVariableOp" + input: "cls/squad/output_bias/adam_m" + input: "save/Identity_595" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_596" + op: "Identity" + input: "save/RestoreV2:595" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_595" + op: "AssignVariableOp" + input: "cls/squad/output_bias/adam_v" + input: "save/Identity_596" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_597" + op: "Identity" + input: "save/RestoreV2:596" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_596" + op: "AssignVariableOp" + input: "cls/squad/output_weights" + input: "save/Identity_597" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_598" + op: "Identity" + input: "save/RestoreV2:597" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_597" + op: "AssignVariableOp" + input: "cls/squad/output_weights/adam_m" + input: "save/Identity_598" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_599" + op: "Identity" + input: "save/RestoreV2:598" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_598" + op: "AssignVariableOp" + input: "cls/squad/output_weights/adam_v" + input: "save/Identity_599" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/Identity_600" + op: "Identity" + input: "save/RestoreV2:599" + attr { + key: "T" + value { + type: DT_INT64 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + unknown_rank: true + } + } + } + } +} +node { + name: "save/AssignVariableOp_599" + op: "AssignVariableOp" + input: "global_step" + input: "save/Identity_600" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "validate_shape" + value { + b: false + } + } +} +node { + name: "save/restore_shard" + op: "NoOp" + input: "^save/AssignVariableOp" + input: "^save/AssignVariableOp_1" + input: "^save/AssignVariableOp_10" + input: 
"^save/AssignVariableOp_100" + input: "^save/AssignVariableOp_101" + input: "^save/AssignVariableOp_102" + input: "^save/AssignVariableOp_103" + input: "^save/AssignVariableOp_104" + input: "^save/AssignVariableOp_105" + input: "^save/AssignVariableOp_106" + input: "^save/AssignVariableOp_107" + input: "^save/AssignVariableOp_108" + input: "^save/AssignVariableOp_109" + input: "^save/AssignVariableOp_11" + input: "^save/AssignVariableOp_110" + input: "^save/AssignVariableOp_111" + input: "^save/AssignVariableOp_112" + input: "^save/AssignVariableOp_113" + input: "^save/AssignVariableOp_114" + input: "^save/AssignVariableOp_115" + input: "^save/AssignVariableOp_116" + input: "^save/AssignVariableOp_117" + input: "^save/AssignVariableOp_118" + input: "^save/AssignVariableOp_119" + input: "^save/AssignVariableOp_12" + input: "^save/AssignVariableOp_120" + input: "^save/AssignVariableOp_121" + input: "^save/AssignVariableOp_122" + input: "^save/AssignVariableOp_123" + input: "^save/AssignVariableOp_124" + input: "^save/AssignVariableOp_125" + input: "^save/AssignVariableOp_126" + input: "^save/AssignVariableOp_127" + input: "^save/AssignVariableOp_128" + input: "^save/AssignVariableOp_129" + input: "^save/AssignVariableOp_13" + input: "^save/AssignVariableOp_130" + input: "^save/AssignVariableOp_131" + input: "^save/AssignVariableOp_132" + input: "^save/AssignVariableOp_133" + input: "^save/AssignVariableOp_134" + input: "^save/AssignVariableOp_135" + input: "^save/AssignVariableOp_136" + input: "^save/AssignVariableOp_137" + input: "^save/AssignVariableOp_138" + input: "^save/AssignVariableOp_139" + input: "^save/AssignVariableOp_14" + input: "^save/AssignVariableOp_140" + input: "^save/AssignVariableOp_141" + input: "^save/AssignVariableOp_142" + input: "^save/AssignVariableOp_143" + input: "^save/AssignVariableOp_144" + input: "^save/AssignVariableOp_145" + input: "^save/AssignVariableOp_146" + input: "^save/AssignVariableOp_147" + input: "^save/AssignVariableOp_148" + input: "^save/AssignVariableOp_149" + input: "^save/AssignVariableOp_15" + input: "^save/AssignVariableOp_150" + input: "^save/AssignVariableOp_151" + input: "^save/AssignVariableOp_152" + input: "^save/AssignVariableOp_153" + input: "^save/AssignVariableOp_154" + input: "^save/AssignVariableOp_155" + input: "^save/AssignVariableOp_156" + input: "^save/AssignVariableOp_157" + input: "^save/AssignVariableOp_158" + input: "^save/AssignVariableOp_159" + input: "^save/AssignVariableOp_16" + input: "^save/AssignVariableOp_160" + input: "^save/AssignVariableOp_161" + input: "^save/AssignVariableOp_162" + input: "^save/AssignVariableOp_163" + input: "^save/AssignVariableOp_164" + input: "^save/AssignVariableOp_165" + input: "^save/AssignVariableOp_166" + input: "^save/AssignVariableOp_167" + input: "^save/AssignVariableOp_168" + input: "^save/AssignVariableOp_169" + input: "^save/AssignVariableOp_17" + input: "^save/AssignVariableOp_170" + input: "^save/AssignVariableOp_171" + input: "^save/AssignVariableOp_172" + input: "^save/AssignVariableOp_173" + input: "^save/AssignVariableOp_174" + input: "^save/AssignVariableOp_175" + input: "^save/AssignVariableOp_176" + input: "^save/AssignVariableOp_177" + input: "^save/AssignVariableOp_178" + input: "^save/AssignVariableOp_179" + input: "^save/AssignVariableOp_18" + input: "^save/AssignVariableOp_180" + input: "^save/AssignVariableOp_181" + input: "^save/AssignVariableOp_182" + input: "^save/AssignVariableOp_183" + input: "^save/AssignVariableOp_184" + input: "^save/AssignVariableOp_185" 
+ input: "^save/AssignVariableOp_186" + input: "^save/AssignVariableOp_187" + input: "^save/AssignVariableOp_188" + input: "^save/AssignVariableOp_189" + input: "^save/AssignVariableOp_19" + input: "^save/AssignVariableOp_190" + input: "^save/AssignVariableOp_191" + input: "^save/AssignVariableOp_192" + input: "^save/AssignVariableOp_193" + input: "^save/AssignVariableOp_194" + input: "^save/AssignVariableOp_195" + input: "^save/AssignVariableOp_196" + input: "^save/AssignVariableOp_197" + input: "^save/AssignVariableOp_198" + input: "^save/AssignVariableOp_199" + input: "^save/AssignVariableOp_2" + input: "^save/AssignVariableOp_20" + input: "^save/AssignVariableOp_200" + input: "^save/AssignVariableOp_201" + input: "^save/AssignVariableOp_202" + input: "^save/AssignVariableOp_203" + input: "^save/AssignVariableOp_204" + input: "^save/AssignVariableOp_205" + input: "^save/AssignVariableOp_206" + input: "^save/AssignVariableOp_207" + input: "^save/AssignVariableOp_208" + input: "^save/AssignVariableOp_209" + input: "^save/AssignVariableOp_21" + input: "^save/AssignVariableOp_210" + input: "^save/AssignVariableOp_211" + input: "^save/AssignVariableOp_212" + input: "^save/AssignVariableOp_213" + input: "^save/AssignVariableOp_214" + input: "^save/AssignVariableOp_215" + input: "^save/AssignVariableOp_216" + input: "^save/AssignVariableOp_217" + input: "^save/AssignVariableOp_218" + input: "^save/AssignVariableOp_219" + input: "^save/AssignVariableOp_22" + input: "^save/AssignVariableOp_220" + input: "^save/AssignVariableOp_221" + input: "^save/AssignVariableOp_222" + input: "^save/AssignVariableOp_223" + input: "^save/AssignVariableOp_224" + input: "^save/AssignVariableOp_225" + input: "^save/AssignVariableOp_226" + input: "^save/AssignVariableOp_227" + input: "^save/AssignVariableOp_228" + input: "^save/AssignVariableOp_229" + input: "^save/AssignVariableOp_23" + input: "^save/AssignVariableOp_230" + input: "^save/AssignVariableOp_231" + input: "^save/AssignVariableOp_232" + input: "^save/AssignVariableOp_233" + input: "^save/AssignVariableOp_234" + input: "^save/AssignVariableOp_235" + input: "^save/AssignVariableOp_236" + input: "^save/AssignVariableOp_237" + input: "^save/AssignVariableOp_238" + input: "^save/AssignVariableOp_239" + input: "^save/AssignVariableOp_24" + input: "^save/AssignVariableOp_240" + input: "^save/AssignVariableOp_241" + input: "^save/AssignVariableOp_242" + input: "^save/AssignVariableOp_243" + input: "^save/AssignVariableOp_244" + input: "^save/AssignVariableOp_245" + input: "^save/AssignVariableOp_246" + input: "^save/AssignVariableOp_247" + input: "^save/AssignVariableOp_248" + input: "^save/AssignVariableOp_249" + input: "^save/AssignVariableOp_25" + input: "^save/AssignVariableOp_250" + input: "^save/AssignVariableOp_251" + input: "^save/AssignVariableOp_252" + input: "^save/AssignVariableOp_253" + input: "^save/AssignVariableOp_254" + input: "^save/AssignVariableOp_255" + input: "^save/AssignVariableOp_256" + input: "^save/AssignVariableOp_257" + input: "^save/AssignVariableOp_258" + input: "^save/AssignVariableOp_259" + input: "^save/AssignVariableOp_26" + input: "^save/AssignVariableOp_260" + input: "^save/AssignVariableOp_261" + input: "^save/AssignVariableOp_262" + input: "^save/AssignVariableOp_263" + input: "^save/AssignVariableOp_264" + input: "^save/AssignVariableOp_265" + input: "^save/AssignVariableOp_266" + input: "^save/AssignVariableOp_267" + input: "^save/AssignVariableOp_268" + input: "^save/AssignVariableOp_269" + input: 
"^save/AssignVariableOp_27" + input: "^save/AssignVariableOp_270" + input: "^save/AssignVariableOp_271" + input: "^save/AssignVariableOp_272" + input: "^save/AssignVariableOp_273" + input: "^save/AssignVariableOp_274" + input: "^save/AssignVariableOp_275" + input: "^save/AssignVariableOp_276" + input: "^save/AssignVariableOp_277" + input: "^save/AssignVariableOp_278" + input: "^save/AssignVariableOp_279" + input: "^save/AssignVariableOp_28" + input: "^save/AssignVariableOp_280" + input: "^save/AssignVariableOp_281" + input: "^save/AssignVariableOp_282" + input: "^save/AssignVariableOp_283" + input: "^save/AssignVariableOp_284" + input: "^save/AssignVariableOp_285" + input: "^save/AssignVariableOp_286" + input: "^save/AssignVariableOp_287" + input: "^save/AssignVariableOp_288" + input: "^save/AssignVariableOp_289" + input: "^save/AssignVariableOp_29" + input: "^save/AssignVariableOp_290" + input: "^save/AssignVariableOp_291" + input: "^save/AssignVariableOp_292" + input: "^save/AssignVariableOp_293" + input: "^save/AssignVariableOp_294" + input: "^save/AssignVariableOp_295" + input: "^save/AssignVariableOp_296" + input: "^save/AssignVariableOp_297" + input: "^save/AssignVariableOp_298" + input: "^save/AssignVariableOp_299" + input: "^save/AssignVariableOp_3" + input: "^save/AssignVariableOp_30" + input: "^save/AssignVariableOp_300" + input: "^save/AssignVariableOp_301" + input: "^save/AssignVariableOp_302" + input: "^save/AssignVariableOp_303" + input: "^save/AssignVariableOp_304" + input: "^save/AssignVariableOp_305" + input: "^save/AssignVariableOp_306" + input: "^save/AssignVariableOp_307" + input: "^save/AssignVariableOp_308" + input: "^save/AssignVariableOp_309" + input: "^save/AssignVariableOp_31" + input: "^save/AssignVariableOp_310" + input: "^save/AssignVariableOp_311" + input: "^save/AssignVariableOp_312" + input: "^save/AssignVariableOp_313" + input: "^save/AssignVariableOp_314" + input: "^save/AssignVariableOp_315" + input: "^save/AssignVariableOp_316" + input: "^save/AssignVariableOp_317" + input: "^save/AssignVariableOp_318" + input: "^save/AssignVariableOp_319" + input: "^save/AssignVariableOp_32" + input: "^save/AssignVariableOp_320" + input: "^save/AssignVariableOp_321" + input: "^save/AssignVariableOp_322" + input: "^save/AssignVariableOp_323" + input: "^save/AssignVariableOp_324" + input: "^save/AssignVariableOp_325" + input: "^save/AssignVariableOp_326" + input: "^save/AssignVariableOp_327" + input: "^save/AssignVariableOp_328" + input: "^save/AssignVariableOp_329" + input: "^save/AssignVariableOp_33" + input: "^save/AssignVariableOp_330" + input: "^save/AssignVariableOp_331" + input: "^save/AssignVariableOp_332" + input: "^save/AssignVariableOp_333" + input: "^save/AssignVariableOp_334" + input: "^save/AssignVariableOp_335" + input: "^save/AssignVariableOp_336" + input: "^save/AssignVariableOp_337" + input: "^save/AssignVariableOp_338" + input: "^save/AssignVariableOp_339" + input: "^save/AssignVariableOp_34" + input: "^save/AssignVariableOp_340" + input: "^save/AssignVariableOp_341" + input: "^save/AssignVariableOp_342" + input: "^save/AssignVariableOp_343" + input: "^save/AssignVariableOp_344" + input: "^save/AssignVariableOp_345" + input: "^save/AssignVariableOp_346" + input: "^save/AssignVariableOp_347" + input: "^save/AssignVariableOp_348" + input: "^save/AssignVariableOp_349" + input: "^save/AssignVariableOp_35" + input: "^save/AssignVariableOp_350" + input: "^save/AssignVariableOp_351" + input: "^save/AssignVariableOp_352" + input: "^save/AssignVariableOp_353" + 
input: "^save/AssignVariableOp_354" + input: "^save/AssignVariableOp_355" + input: "^save/AssignVariableOp_356" + input: "^save/AssignVariableOp_357" + input: "^save/AssignVariableOp_358" + input: "^save/AssignVariableOp_359" + input: "^save/AssignVariableOp_36" + input: "^save/AssignVariableOp_360" + input: "^save/AssignVariableOp_361" + input: "^save/AssignVariableOp_362" + input: "^save/AssignVariableOp_363" + input: "^save/AssignVariableOp_364" + input: "^save/AssignVariableOp_365" + input: "^save/AssignVariableOp_366" + input: "^save/AssignVariableOp_367" + input: "^save/AssignVariableOp_368" + input: "^save/AssignVariableOp_369" + input: "^save/AssignVariableOp_37" + input: "^save/AssignVariableOp_370" + input: "^save/AssignVariableOp_371" + input: "^save/AssignVariableOp_372" + input: "^save/AssignVariableOp_373" + input: "^save/AssignVariableOp_374" + input: "^save/AssignVariableOp_375" + input: "^save/AssignVariableOp_376" + input: "^save/AssignVariableOp_377" + input: "^save/AssignVariableOp_378" + input: "^save/AssignVariableOp_379" + input: "^save/AssignVariableOp_38" + input: "^save/AssignVariableOp_380" + input: "^save/AssignVariableOp_381" + input: "^save/AssignVariableOp_382" + input: "^save/AssignVariableOp_383" + input: "^save/AssignVariableOp_384" + input: "^save/AssignVariableOp_385" + input: "^save/AssignVariableOp_386" + input: "^save/AssignVariableOp_387" + input: "^save/AssignVariableOp_388" + input: "^save/AssignVariableOp_389" + input: "^save/AssignVariableOp_39" + input: "^save/AssignVariableOp_390" + input: "^save/AssignVariableOp_391" + input: "^save/AssignVariableOp_392" + input: "^save/AssignVariableOp_393" + input: "^save/AssignVariableOp_394" + input: "^save/AssignVariableOp_395" + input: "^save/AssignVariableOp_396" + input: "^save/AssignVariableOp_397" + input: "^save/AssignVariableOp_398" + input: "^save/AssignVariableOp_399" + input: "^save/AssignVariableOp_4" + input: "^save/AssignVariableOp_40" + input: "^save/AssignVariableOp_400" + input: "^save/AssignVariableOp_401" + input: "^save/AssignVariableOp_402" + input: "^save/AssignVariableOp_403" + input: "^save/AssignVariableOp_404" + input: "^save/AssignVariableOp_405" + input: "^save/AssignVariableOp_406" + input: "^save/AssignVariableOp_407" + input: "^save/AssignVariableOp_408" + input: "^save/AssignVariableOp_409" + input: "^save/AssignVariableOp_41" + input: "^save/AssignVariableOp_410" + input: "^save/AssignVariableOp_411" + input: "^save/AssignVariableOp_412" + input: "^save/AssignVariableOp_413" + input: "^save/AssignVariableOp_414" + input: "^save/AssignVariableOp_415" + input: "^save/AssignVariableOp_416" + input: "^save/AssignVariableOp_417" + input: "^save/AssignVariableOp_418" + input: "^save/AssignVariableOp_419" + input: "^save/AssignVariableOp_42" + input: "^save/AssignVariableOp_420" + input: "^save/AssignVariableOp_421" + input: "^save/AssignVariableOp_422" + input: "^save/AssignVariableOp_423" + input: "^save/AssignVariableOp_424" + input: "^save/AssignVariableOp_425" + input: "^save/AssignVariableOp_426" + input: "^save/AssignVariableOp_427" + input: "^save/AssignVariableOp_428" + input: "^save/AssignVariableOp_429" + input: "^save/AssignVariableOp_43" + input: "^save/AssignVariableOp_430" + input: "^save/AssignVariableOp_431" + input: "^save/AssignVariableOp_432" + input: "^save/AssignVariableOp_433" + input: "^save/AssignVariableOp_434" + input: "^save/AssignVariableOp_435" + input: "^save/AssignVariableOp_436" + input: "^save/AssignVariableOp_437" + input: 
"^save/AssignVariableOp_438" + input: "^save/AssignVariableOp_439" + input: "^save/AssignVariableOp_44" + input: "^save/AssignVariableOp_440" + input: "^save/AssignVariableOp_441" + input: "^save/AssignVariableOp_442" + input: "^save/AssignVariableOp_443" + input: "^save/AssignVariableOp_444" + input: "^save/AssignVariableOp_445" + input: "^save/AssignVariableOp_446" + input: "^save/AssignVariableOp_447" + input: "^save/AssignVariableOp_448" + input: "^save/AssignVariableOp_449" + input: "^save/AssignVariableOp_45" + input: "^save/AssignVariableOp_450" + input: "^save/AssignVariableOp_451" + input: "^save/AssignVariableOp_452" + input: "^save/AssignVariableOp_453" + input: "^save/AssignVariableOp_454" + input: "^save/AssignVariableOp_455" + input: "^save/AssignVariableOp_456" + input: "^save/AssignVariableOp_457" + input: "^save/AssignVariableOp_458" + input: "^save/AssignVariableOp_459" + input: "^save/AssignVariableOp_46" + input: "^save/AssignVariableOp_460" + input: "^save/AssignVariableOp_461" + input: "^save/AssignVariableOp_462" + input: "^save/AssignVariableOp_463" + input: "^save/AssignVariableOp_464" + input: "^save/AssignVariableOp_465" + input: "^save/AssignVariableOp_466" + input: "^save/AssignVariableOp_467" + input: "^save/AssignVariableOp_468" + input: "^save/AssignVariableOp_469" + input: "^save/AssignVariableOp_47" + input: "^save/AssignVariableOp_470" + input: "^save/AssignVariableOp_471" + input: "^save/AssignVariableOp_472" + input: "^save/AssignVariableOp_473" + input: "^save/AssignVariableOp_474" + input: "^save/AssignVariableOp_475" + input: "^save/AssignVariableOp_476" + input: "^save/AssignVariableOp_477" + input: "^save/AssignVariableOp_478" + input: "^save/AssignVariableOp_479" + input: "^save/AssignVariableOp_48" + input: "^save/AssignVariableOp_480" + input: "^save/AssignVariableOp_481" + input: "^save/AssignVariableOp_482" + input: "^save/AssignVariableOp_483" + input: "^save/AssignVariableOp_484" + input: "^save/AssignVariableOp_485" + input: "^save/AssignVariableOp_486" + input: "^save/AssignVariableOp_487" + input: "^save/AssignVariableOp_488" + input: "^save/AssignVariableOp_489" + input: "^save/AssignVariableOp_49" + input: "^save/AssignVariableOp_490" + input: "^save/AssignVariableOp_491" + input: "^save/AssignVariableOp_492" + input: "^save/AssignVariableOp_493" + input: "^save/AssignVariableOp_494" + input: "^save/AssignVariableOp_495" + input: "^save/AssignVariableOp_496" + input: "^save/AssignVariableOp_497" + input: "^save/AssignVariableOp_498" + input: "^save/AssignVariableOp_499" + input: "^save/AssignVariableOp_5" + input: "^save/AssignVariableOp_50" + input: "^save/AssignVariableOp_500" + input: "^save/AssignVariableOp_501" + input: "^save/AssignVariableOp_502" + input: "^save/AssignVariableOp_503" + input: "^save/AssignVariableOp_504" + input: "^save/AssignVariableOp_505" + input: "^save/AssignVariableOp_506" + input: "^save/AssignVariableOp_507" + input: "^save/AssignVariableOp_508" + input: "^save/AssignVariableOp_509" + input: "^save/AssignVariableOp_51" + input: "^save/AssignVariableOp_510" + input: "^save/AssignVariableOp_511" + input: "^save/AssignVariableOp_512" + input: "^save/AssignVariableOp_513" + input: "^save/AssignVariableOp_514" + input: "^save/AssignVariableOp_515" + input: "^save/AssignVariableOp_516" + input: "^save/AssignVariableOp_517" + input: "^save/AssignVariableOp_518" + input: "^save/AssignVariableOp_519" + input: "^save/AssignVariableOp_52" + input: "^save/AssignVariableOp_520" + input: "^save/AssignVariableOp_521" + 
input: "^save/AssignVariableOp_522" + input: "^save/AssignVariableOp_523" + input: "^save/AssignVariableOp_524" + input: "^save/AssignVariableOp_525" + input: "^save/AssignVariableOp_526" + input: "^save/AssignVariableOp_527" + input: "^save/AssignVariableOp_528" + input: "^save/AssignVariableOp_529" + input: "^save/AssignVariableOp_53" + input: "^save/AssignVariableOp_530" + input: "^save/AssignVariableOp_531" + input: "^save/AssignVariableOp_532" + input: "^save/AssignVariableOp_533" + input: "^save/AssignVariableOp_534" + input: "^save/AssignVariableOp_535" + input: "^save/AssignVariableOp_536" + input: "^save/AssignVariableOp_537" + input: "^save/AssignVariableOp_538" + input: "^save/AssignVariableOp_539" + input: "^save/AssignVariableOp_54" + input: "^save/AssignVariableOp_540" + input: "^save/AssignVariableOp_541" + input: "^save/AssignVariableOp_542" + input: "^save/AssignVariableOp_543" + input: "^save/AssignVariableOp_544" + input: "^save/AssignVariableOp_545" + input: "^save/AssignVariableOp_546" + input: "^save/AssignVariableOp_547" + input: "^save/AssignVariableOp_548" + input: "^save/AssignVariableOp_549" + input: "^save/AssignVariableOp_55" + input: "^save/AssignVariableOp_550" + input: "^save/AssignVariableOp_551" + input: "^save/AssignVariableOp_552" + input: "^save/AssignVariableOp_553" + input: "^save/AssignVariableOp_554" + input: "^save/AssignVariableOp_555" + input: "^save/AssignVariableOp_556" + input: "^save/AssignVariableOp_557" + input: "^save/AssignVariableOp_558" + input: "^save/AssignVariableOp_559" + input: "^save/AssignVariableOp_56" + input: "^save/AssignVariableOp_560" + input: "^save/AssignVariableOp_561" + input: "^save/AssignVariableOp_562" + input: "^save/AssignVariableOp_563" + input: "^save/AssignVariableOp_564" + input: "^save/AssignVariableOp_565" + input: "^save/AssignVariableOp_566" + input: "^save/AssignVariableOp_567" + input: "^save/AssignVariableOp_568" + input: "^save/AssignVariableOp_569" + input: "^save/AssignVariableOp_57" + input: "^save/AssignVariableOp_570" + input: "^save/AssignVariableOp_571" + input: "^save/AssignVariableOp_572" + input: "^save/AssignVariableOp_573" + input: "^save/AssignVariableOp_574" + input: "^save/AssignVariableOp_575" + input: "^save/AssignVariableOp_576" + input: "^save/AssignVariableOp_577" + input: "^save/AssignVariableOp_578" + input: "^save/AssignVariableOp_579" + input: "^save/AssignVariableOp_58" + input: "^save/AssignVariableOp_580" + input: "^save/AssignVariableOp_581" + input: "^save/AssignVariableOp_582" + input: "^save/AssignVariableOp_583" + input: "^save/AssignVariableOp_584" + input: "^save/AssignVariableOp_585" + input: "^save/AssignVariableOp_586" + input: "^save/AssignVariableOp_587" + input: "^save/AssignVariableOp_588" + input: "^save/AssignVariableOp_589" + input: "^save/AssignVariableOp_59" + input: "^save/AssignVariableOp_590" + input: "^save/AssignVariableOp_591" + input: "^save/AssignVariableOp_592" + input: "^save/AssignVariableOp_593" + input: "^save/AssignVariableOp_594" + input: "^save/AssignVariableOp_595" + input: "^save/AssignVariableOp_596" + input: "^save/AssignVariableOp_597" + input: "^save/AssignVariableOp_598" + input: "^save/AssignVariableOp_599" + input: "^save/AssignVariableOp_6" + input: "^save/AssignVariableOp_60" + input: "^save/AssignVariableOp_61" + input: "^save/AssignVariableOp_62" + input: "^save/AssignVariableOp_63" + input: "^save/AssignVariableOp_64" + input: "^save/AssignVariableOp_65" + input: "^save/AssignVariableOp_66" + input: "^save/AssignVariableOp_67" + 
input: "^save/AssignVariableOp_68" + input: "^save/AssignVariableOp_69" + input: "^save/AssignVariableOp_7" + input: "^save/AssignVariableOp_70" + input: "^save/AssignVariableOp_71" + input: "^save/AssignVariableOp_72" + input: "^save/AssignVariableOp_73" + input: "^save/AssignVariableOp_74" + input: "^save/AssignVariableOp_75" + input: "^save/AssignVariableOp_76" + input: "^save/AssignVariableOp_77" + input: "^save/AssignVariableOp_78" + input: "^save/AssignVariableOp_79" + input: "^save/AssignVariableOp_8" + input: "^save/AssignVariableOp_80" + input: "^save/AssignVariableOp_81" + input: "^save/AssignVariableOp_82" + input: "^save/AssignVariableOp_83" + input: "^save/AssignVariableOp_84" + input: "^save/AssignVariableOp_85" + input: "^save/AssignVariableOp_86" + input: "^save/AssignVariableOp_87" + input: "^save/AssignVariableOp_88" + input: "^save/AssignVariableOp_89" + input: "^save/AssignVariableOp_9" + input: "^save/AssignVariableOp_90" + input: "^save/AssignVariableOp_91" + input: "^save/AssignVariableOp_92" + input: "^save/AssignVariableOp_93" + input: "^save/AssignVariableOp_94" + input: "^save/AssignVariableOp_95" + input: "^save/AssignVariableOp_96" + input: "^save/AssignVariableOp_97" + input: "^save/AssignVariableOp_98" + input: "^save/AssignVariableOp_99" + attr { + key: "_has_manual_control_dependencies" + value { + b: true + } + } +} +node { + name: "save/restore_all" + op: "NoOp" + input: "^save/restore_shard" +} +library { + function { + signature { + name: "__inference_Dataset_flat_map_read_one_file_30" + input_arg { + name: "args_0" + type: DT_STRING + } + output_arg { + name: "identity" + type: DT_VARIANT + } + is_stateful: true + control_output: "TFRecordDataset" + } + node_def { + name: "compression_type" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + } + string_val: "" + } + } + } + } + node_def { + name: "buffer_size" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + } + int64_val: 262144 + } + } + } + } + node_def { + name: "TFRecordDataset" + op: "TFRecordDataset" + input: "args_0" + input: "compression_type:output:0" + input: "buffer_size:output:0" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "metadata" + value { + s: "" + } + } + experimental_type { + type_id: TFT_PRODUCT + args { + type_id: TFT_DATASET + args { + type_id: TFT_TENSOR + args { + type_id: TFT_STRING + } + } + } + } + } + node_def { + name: "Identity" + op: "Identity" + input: "TFRecordDataset:handle:0" + input: "^NoOp" + attr { + key: "T" + value { + type: DT_VARIANT + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node_def { + name: "NoOp" + op: "NoOp" + input: "^TFRecordDataset" + attr { + key: "_acd_function_control_output" + value { + b: true + } + } + attr { + key: "_output_shapes" + value { + list { + } + } + } + } + ret { + key: "identity" + value: "Identity:output:0" + } + attr { + key: "_input_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "_tf_data_function" + value { + b: true + } + } + control_ret { + key: "TFRecordDataset" + value: "TFRecordDataset" + } + arg_attr { + value { + attr { + key: "_output_shapes" 
+ value { + list { + shape { + } + } + } + } + attr { + key: "_user_specified_name" + value { + s: "args_0" + } + } + } + } + } + function { + signature { + name: "global_step_cond_true_6" + input_arg { + name: "read_readvariableop_global_step" + type: DT_RESOURCE + handle_data { + dtype: DT_INT64 + shape { + } + } + } + input_arg { + name: "placeholder" + type: DT_INT64 + } + output_arg { + name: "identity" + type: DT_INT64 + } + is_stateful: true + } + node_def { + name: "Read/ReadVariableOp" + op: "ReadVariableOp" + input: "read_readvariableop_global_step" + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + } + node_def { + name: "Identity" + op: "Identity" + input: "Read/ReadVariableOp:value:0" + attr { + key: "T" + value { + type: DT_INT64 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + ret { + key: "identity" + value: "Identity:output:0" + } + attr { + key: "_input_shapes" + value { + list { + shape { + } + shape { + } + } + } + } + arg_attr { + key: 1 + value { + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + } + } + function { + signature { + name: "__inference_tf_data_experimental_map_and_batch_lambda_68" + input_arg { + name: "args_0" + type: DT_STRING + } + output_arg { + name: "identity" + type: DT_INT32 + } + output_arg { + name: "identity_1" + type: DT_INT32 + } + output_arg { + name: "identity_2" + type: DT_INT32 + } + output_arg { + name: "identity_3" + type: DT_INT32 + } + output_arg { + name: "identity_4" + type: DT_INT32 + } + output_arg { + name: "identity_5" + type: DT_INT32 + } + } + node_def { + name: "ParseSingleExample/ParseExample/Const" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + dim { + } + } + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/Const_1" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + dim { + } + } + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/Const_2" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + dim { + } + } + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/Const_3" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + dim { + } + } + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/Const_4" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + dim { + } + } + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/Const_5" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + 
attr { + key: "dtype" + value { + type: DT_INT64 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT64 + tensor_shape { + dim { + } + } + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/ParseExampleV2/names" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + } + } + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/ParseExampleV2/sparse_keys" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + } + } + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/ParseExampleV2/dense_keys" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 6 + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 6 + } + } + string_val: "end_positions" + string_val: "input_ids" + string_val: "input_mask" + string_val: "segment_ids" + string_val: "start_positions" + string_val: "unique_ids" + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/ParseExampleV2/ragged_keys" + op: "Const" + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + } + } + } + } + } + attr { + key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + } + } + } + } + } + } + node_def { + name: "ParseSingleExample/ParseExample/ParseExampleV2" + op: "ParseExampleV2" + input: "args_0" + input: "ParseSingleExample/ParseExample/ParseExampleV2/names:output:0" + input: "ParseSingleExample/ParseExample/ParseExampleV2/sparse_keys:output:0" + input: "ParseSingleExample/ParseExample/ParseExampleV2/dense_keys:output:0" + input: "ParseSingleExample/ParseExample/ParseExampleV2/ragged_keys:output:0" + input: "ParseSingleExample/ParseExample/Const:output:0" + input: "ParseSingleExample/ParseExample/Const_1:output:0" + input: "ParseSingleExample/ParseExample/Const_2:output:0" + input: "ParseSingleExample/ParseExample/Const_3:output:0" + input: "ParseSingleExample/ParseExample/Const_4:output:0" + input: "ParseSingleExample/ParseExample/Const_5:output:0" + attr { + key: "Tdense" + value { + list { + type: DT_INT64 + type: DT_INT64 + type: DT_INT64 + type: DT_INT64 + type: DT_INT64 + type: DT_INT64 + } + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + shape { + dim { + size: 384 + } + } + shape { + dim { + size: 384 + } + } + shape { + dim { + size: 384 + } + } + shape { + } + shape { + } + } + } + } + attr { + key: "dense_shapes" + value { + list { + shape { + } + shape { + dim { + size: 384 + } + } + shape { + dim { + size: 384 + } + } + shape { + dim { + size: 384 + } + } + shape { + } + shape { + } + } + } + } + attr { + key: "num_sparse" + value { + i: 0 + } + } + attr { + key: "ragged_split_types" + value { + list { + } + } + } + attr { + key: "ragged_value_types" + value { + list { + } + } + } + attr { + key: "sparse_types" + value { + list { + } + } + } + } + node_def { + name: "Cast" + op: "Cast" + input: "ParseSingleExample/ParseExample/ParseExampleV2:dense_values:0" + attr { + key: "DstT" 
+ value { + type: DT_INT32 + } + } + attr { + key: "SrcT" + value { + type: DT_INT64 + } + } + attr { + key: "Truncate" + value { + b: false + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node_def { + name: "Cast_1" + op: "Cast" + input: "ParseSingleExample/ParseExample/ParseExampleV2:dense_values:1" + attr { + key: "DstT" + value { + type: DT_INT32 + } + } + attr { + key: "SrcT" + value { + type: DT_INT64 + } + } + attr { + key: "Truncate" + value { + b: false + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 384 + } + } + } + } + } + } + node_def { + name: "Cast_2" + op: "Cast" + input: "ParseSingleExample/ParseExample/ParseExampleV2:dense_values:2" + attr { + key: "DstT" + value { + type: DT_INT32 + } + } + attr { + key: "SrcT" + value { + type: DT_INT64 + } + } + attr { + key: "Truncate" + value { + b: false + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 384 + } + } + } + } + } + } + node_def { + name: "Cast_3" + op: "Cast" + input: "ParseSingleExample/ParseExample/ParseExampleV2:dense_values:3" + attr { + key: "DstT" + value { + type: DT_INT32 + } + } + attr { + key: "SrcT" + value { + type: DT_INT64 + } + } + attr { + key: "Truncate" + value { + b: false + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 384 + } + } + } + } + } + } + node_def { + name: "Cast_4" + op: "Cast" + input: "ParseSingleExample/ParseExample/ParseExampleV2:dense_values:4" + attr { + key: "DstT" + value { + type: DT_INT32 + } + } + attr { + key: "SrcT" + value { + type: DT_INT64 + } + } + attr { + key: "Truncate" + value { + b: false + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node_def { + name: "Cast_5" + op: "Cast" + input: "ParseSingleExample/ParseExample/ParseExampleV2:dense_values:5" + attr { + key: "DstT" + value { + type: DT_INT32 + } + } + attr { + key: "SrcT" + value { + type: DT_INT64 + } + } + attr { + key: "Truncate" + value { + b: false + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node_def { + name: "Identity" + op: "Identity" + input: "Cast:y:0" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node_def { + name: "Identity_1" + op: "Identity" + input: "Cast_1:y:0" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 384 + } + } + } + } + } + } + node_def { + name: "Identity_2" + op: "Identity" + input: "Cast_2:y:0" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 384 + } + } + } + } + } + } + node_def { + name: "Identity_3" + op: "Identity" + input: "Cast_3:y:0" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + dim { + size: 384 + } + } + } + } + } + } + node_def { + name: "Identity_4" + op: "Identity" + input: "Cast_4:y:0" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + node_def { + name: "Identity_5" + op: "Identity" + input: "Cast_5:y:0" + attr { + key: "T" + value { + type: DT_INT32 + } + } + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + ret { + key: "identity" + value: "Identity:output:0" + } + ret { + key: 
"identity_1" + value: "Identity_1:output:0" + } + ret { + key: "identity_2" + value: "Identity_2:output:0" + } + ret { + key: "identity_3" + value: "Identity_3:output:0" + } + ret { + key: "identity_4" + value: "Identity_4:output:0" + } + ret { + key: "identity_5" + value: "Identity_5:output:0" + } + attr { + key: "_input_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "_tf_data_function" + value { + b: true + } + } + arg_attr { + value { + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + attr { + key: "_user_specified_name" + value { + s: "args_0" + } + } + } + } + } + function { + signature { + name: "global_step_cond_false_7" + input_arg { + name: "placeholder" + type: DT_RESOURCE + } + input_arg { + name: "global_step_initializer_zeros_0" + type: DT_INT64 + } + output_arg { + name: "global_step_initializer_zeros" + type: DT_INT64 + } + } + ret { + key: "global_step_initializer_zeros" + value: "global_step_initializer_zeros_0" + } + attr { + key: "_input_shapes" + value { + list { + shape { + } + shape { + } + } + } + } + arg_attr { + key: 1 + value { + attr { + key: "_output_shapes" + value { + list { + shape { + } + } + } + } + } + } + } +} +versions { + producer: 1286 + min_consumer: 12 +} diff --git a/models/cifar_net.pth b/models/cifar_net.pth new file mode 100644 index 0000000..427eaa5 Binary files /dev/null and b/models/cifar_net.pth differ diff --git a/models/mnist_cnn.pt b/models/mnist_cnn.pt new file mode 100644 index 0000000..ce17772 Binary files /dev/null and b/models/mnist_cnn.pt differ diff --git a/models/resnet_saved_model/model_0.pth b/models/resnet_saved_model/model_0.pth new file mode 100644 index 0000000..6e47da1 Binary files /dev/null and b/models/resnet_saved_model/model_0.pth differ diff --git a/parallel-GA.sh b/parallel-GA.sh new file mode 100644 index 0000000..0e4a447 --- /dev/null +++ b/parallel-GA.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + + +# TODO + +cmd1=$1 +cmd2=$2 +echo $cmd1 +echo $cmd2 + + +# ${cmd1} 2>&1 | tee $cmd2 & TASK=$! +${cmd1} & TASK=$! +python UTIL.py $cmd2 & QUERY=$! +wait $TASK +wait $QUERY + + + +# Previous commands directly inside experiments.sh: + # ${cmd} & TASK=$! + # python UTIL.py ${opt_path} & QUERY=$! + # wait $TASK + # wait $QUERY + # 2>&1 | tee ${log_file}.txt \ No newline at end of file diff --git a/parallel-TAPO.sh b/parallel-TAPO.sh new file mode 100644 index 0000000..ca73f09 --- /dev/null +++ b/parallel-TAPO.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +# TAPORUN="False" +# echo $TAPORUN +# echo 'here' + +cmd1=$1 +cmd2=$2 +# cmd3="python TAPO.py ${cmd2}" + + +# ${cmd1} 2>&1 | tee $cmd2 & TASK=$! +${cmd1} & TASK=$! +python TAPO.py $cmd2 & QUERY=$! +wait $TASK +wait $QUERY + + + +# Previous commands directly inside experiments.sh: + # ${cmd} & TASK=$! + # python TAPO.py ${opt_path} & QUERY=$! 
+ # wait $TASK + # wait $QUERY + # 2>&1 | tee ${log_file}.txt \ No newline at end of file diff --git a/requirements/pip-freeze-requirements-nlp.txt b/requirements/pip-freeze-requirements-nlp.txt new file mode 100644 index 0000000..056892a --- /dev/null +++ b/requirements/pip-freeze-requirements-nlp.txt @@ -0,0 +1,69 @@ +absl-py==1.4.0 +APScheduler==3.10.1 +arrow==1.2.3 +astunparse==1.6.3 +backports.zoneinfo==0.2.1 +cachetools==5.3.0 +carbontracker==1.1.7 +certifi==2024.7.4 +charset-normalizer==3.3.2 +click==8.1.3 +codecarbon==2.2.1 +decorator==5.1.1 +eco2ai==0.3.9 +flatbuffers==23.5.9 +future==0.18.3 +fuzzywuzzy==0.18.0 +gast==0.4.0 +geocoder==1.38.1 +google-auth==2.18.0 +google-auth-oauthlib==0.4.6 +google-pasta==0.2.0 +GPUtil==1.4.0 +grpcio==1.54.2 +h5py==3.8.0 +idna==3.7 +importlib-metadata==6.6.0 +keras==2.11.0 +libclang==16.0.0 +Markdown==3.4.3 +MarkupSafe==2.1.2 +numpy==1.21.6 +nvidia-cublas-cu11==11.11.3.6 +nvidia-cudnn-cu11==8.9.1.23 +oauthlib==3.2.2 +opt-einsum==3.3.0 +packaging==23.1 +pandas==1.3.5 +pkcs7==0.1.2 +protobuf==3.19.6 +psutil==5.9.5 +py-cpuinfo==9.0.0 +pyasn1==0.5.0 +pyasn1-modules==0.3.0 +pycryptodome==3.20.0 +pynvml==11.5.0 +PyP100 @ git+https://github.com/almottier/TapoP100.git@04c452fd8af9d8505e4177ba3df157372ce3ee0c +python-dateutil==2.8.2 +pytz==2023.3 +ratelim==0.1.6 +requests==2.31.0 +requests-oauthlib==1.3.1 +rsa==4.9 +six==1.16.0 +tensorboard==2.11.2 +tensorboard-data-server==0.6.1 +tensorboard-plugin-wit==1.8.1 +tensorflow==2.11.0 +tensorflow-estimator==2.11.0 +tensorflow-io-gcs-filesystem==0.32.0 +termcolor==2.3.0 +tf-slim==1.1.0 +tornado==6.2 +tqdm==4.65.0 +typing_extensions==4.5.0 +tzlocal==5.0.1 +urllib3 +Werkzeug==2.2.3 +wrapt==1.15.0 +zipp==3.15.0 diff --git a/requirements/pip-freeze-requirements-vision.txt b/requirements/pip-freeze-requirements-vision.txt new file mode 100644 index 0000000..5c165a7 --- /dev/null +++ b/requirements/pip-freeze-requirements-vision.txt @@ -0,0 +1,51 @@ +APScheduler==3.10.0 +arrow==1.2.3 +carbontracker==1.1.7 +certifi==2023.11.17 +charset-normalizer==3.3.2 +click==8.1.3 +codecarbon==2.1.4 +contourpy==1.0.7 +cycler==0.11.0 +decorator==5.1.1 +eco2ai==0.3.5 +fonttools==4.38.0 +future==0.18.3 +fuzzywuzzy==0.18.0 +geocoder==1.38.1 +GPUtil==1.4.0 +idna==3.6 +importlib-metadata==6.0.0 +joblib==1.2.0 +kiwisolver==1.4.4 +matplotlib==3.6.3 +numpy==1.24.1 +packaging==23.0 +pandas==1.4.3 +Pillow==9.4.0 +pkcs7==0.1.2 +psutil==5.9.4 +py-cpuinfo==9.0.0 +pycryptodome==3.19.0 +pynvml==11.4.1 +PyP100 @ git+https://github.com/almottier/TapoP100.git@778123747e31f1cdea2f069fecb0ae84b536bf08 +pyparsing==3.0.9 +python-dateutil==2.8.2 +pytz==2022.7.1 +pytz-deprecation-shim==0.1.0.post0 +ratelim==0.1.6 +requests==2.31.0 +scikit-learn==1.2.0 +scipy==1.10.0 +six==1.16.0 +thop==0.1.1.post2209072238 +threadpoolctl==3.1.0 +torch==1.13.1+cu116 +torchvision==0.14.1+cu116 +tornado==6.2 +tqdm==4.64.1 +typing_extensions==4.4.0 +tzdata==2022.7 +tzlocal==4.2 +urllib3==2.1.0 +zipp==3.12.0 diff --git a/requirements/requirements.txt b/requirements/requirements.txt new file mode 100644 index 0000000..b39b1d1 --- /dev/null +++ b/requirements/requirements.txt @@ -0,0 +1,17 @@ +carbontracker +codecarbon +eco2ai +GPUtil +numpy +pandas +psutil +pynvml +requests +scikit-learn +tqdm + +thop + +PyP100 + +matplotlib \ No newline at end of file diff --git a/requirements/requirements_nlp.txt b/requirements/requirements_nlp.txt new file mode 100644 index 0000000..64e83a6 --- /dev/null +++ b/requirements/requirements_nlp.txt @@ -0,0 +1,17 @@ +carbontracker==1.1.7 
+codecarbon==2.2.1 +eco2ai==0.3.9 +GPUtil==1.4.0 + +numpy==1.21.6 +pandas==1.3.5 + +psutil==5.9.5 +pynvml==11.5.0 + +requests==2.31.0 +scikit-learn==1.2.0 + +tqdm==4.64.1 + +PyP100==0.1.4 \ No newline at end of file diff --git a/requirements/requirements_vision.txt b/requirements/requirements_vision.txt new file mode 100644 index 0000000..3140666 --- /dev/null +++ b/requirements/requirements_vision.txt @@ -0,0 +1,20 @@ +carbontracker==1.1.7 +codecarbon==2.1.4 +eco2ai==0.3.5 +GPUtil==1.4.0 + +matplotlib==3.6.3 +numpy==1.24.1 +pandas==1.4.3 + +psutil==5.9.4 +pynvml==11.4.1 + +requests==2.28.1 +scikit-learn==1.2.0 + +tqdm==4.64.1 + +thop==0.1.1.post2209072238 + +PyP100==0.1.4 \ No newline at end of file diff --git a/templates/TAPO-credentials.json b/templates/TAPO-credentials.json new file mode 100644 index 0000000..cffada5 --- /dev/null +++ b/templates/TAPO-credentials.json @@ -0,0 +1,5 @@ +{ + "email": "", + "password": "", + "ip": "" +} \ No newline at end of file
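
The credentials template above only declares the fields (`email`, `password`, `ip`) that the Tapo smart-plug poller needs. As a purely illustrative sketch (not part of this diff), the snippet below shows one way a poller in the spirit of TAPO.py could consume `templates/TAPO-credentials.json` using the pinned TapoP100 fork of PyP100. The output file name and polling loop are assumptions made here for illustration, and the `P110` calls (`handshake`/`login`/`getEnergyUsage`) follow the upstream PyP100 interface, which the pinned fork may wrap or simplify.

```python
# Illustrative sketch only: poll a Tapo P110 plug using the credentials template.
# The output path, loop length, and interval below are hypothetical.
import json
import time

from PyP100 import PyP110  # provided by the pinned TapoP100 fork in the requirements

# Load the filled-in copy of templates/TAPO-credentials.json.
with open("TAPO-credentials.json") as f:
    creds = json.load(f)

plug = PyP110.P110(creds["ip"], creds["email"], creds["password"])
plug.handshake()  # upstream PyP100 handshake; may be implicit in newer forks
plug.login()

# Append one power reading per second to a JSON-lines log (hypothetical file name).
with open("TAPO-power-readings.jsonl", "a") as log:
    for _ in range(10):
        usage = plug.getEnergyUsage()  # dict with instantaneous power, energy counters, etc.
        log.write(json.dumps({"timestamp": time.time(), **usage}) + "\n")
        time.sleep(1)
```

In the experiments themselves, a poller like this is not run standalone: `parallel-TAPO.sh` above launches the training command and `python TAPO.py` side by side and `wait`s on both PIDs, so plug readings cover exactly the lifetime of the training job.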